diff --git beeline/src/java/org/apache/hive/beeline/Commands.java beeline/src/java/org/apache/hive/beeline/Commands.java index d179b37..390afc0 100644 --- beeline/src/java/org/apache/hive/beeline/Commands.java +++ beeline/src/java/org/apache/hive/beeline/Commands.java @@ -54,6 +54,7 @@ import java.util.TreeSet; import org.apache.hadoop.hive.common.cli.ShellCmdExecutor; +import org.apache.hadoop.hive.common.io.FetchConverter; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.SystemVariables; @@ -1002,6 +1003,15 @@ private boolean executeInternal(String sql, boolean call) { beeLine.showWarnings(); if (hasResults) { + OutputFile outputFile = beeLine.getRecordOutputFile(); + if (outputFile != null && outputFile.isActiveConverter()) { + beeLine.getRecordOutputFile().fetchStarted(); + if (!sql.trim().toLowerCase().startsWith("explain")) { + beeLine.getRecordOutputFile().foundQuery(true); + } else { + beeLine.getRecordOutputFile().foundQuery(false); + } + } do { ResultSet rs = stmnt.getResultSet(); try { @@ -1019,6 +1029,9 @@ private boolean executeInternal(String sql, boolean call) { rs.close(); } } while (BeeLine.getMoreResults(stmnt)); + if (outputFile != null && outputFile.isActiveConverter()) { + outputFile.fetchFinished(); + } } else { int count = stmnt.getUpdateCount(); long end = System.currentTimeMillis(); diff --git beeline/src/java/org/apache/hive/beeline/OutputFile.java beeline/src/java/org/apache/hive/beeline/OutputFile.java index 1014af3..63ddf19 100644 --- beeline/src/java/org/apache/hive/beeline/OutputFile.java +++ beeline/src/java/org/apache/hive/beeline/OutputFile.java @@ -23,17 +23,87 @@ package org.apache.hive.beeline; import java.io.File; -import java.io.FileWriter; import java.io.IOException; -import java.io.PrintWriter; +import java.io.PrintStream; + +import org.apache.hadoop.hive.common.io.DigestPrintStream; +import org.apache.hadoop.hive.common.io.FetchConverter; 
+import org.apache.hadoop.hive.common.io.SortAndDigestPrintStream; +import org.apache.hadoop.hive.common.io.SortPrintStream; public class OutputFile { - final File file; - final PrintWriter out; + private final File file; + private PrintStream out; + private boolean isActiveFetchConverter = false; public OutputFile(String filename) throws IOException { file = new File(filename); - out = new PrintWriter(new FileWriter(file)); + out = new PrintStream(file); + } + + /** + * Set converter for the output. Used only for testing. + * @param converter The type of the converter to use + * @throws Exception In case of an error in stream creation + */ + void setConverter(SupportedConverter converter) throws Exception { + switch (converter) { + case SORT_QUERY_RESULTS: + out = new SortPrintStream(out, "UTF-8"); + break; + case HASH_QUERY_RESULTS: + out = new DigestPrintStream(out, "UTF-8"); + break; + case SORT_AND_HASH_QUERY_RESULTS: + out = new SortAndDigestPrintStream(out, "UTF-8"); + break; + default: + // No wrapping is needed + } + isActiveFetchConverter = (out instanceof FetchConverter); + } + + /** + * Returns true if a FetchConverter is defined for writing the results. Should be used only for + * testing. + * @return True if a FetchConverter is active + */ + boolean isActiveConverter() { + return isActiveFetchConverter; + } + + /** + * Indicates that result fetching is started, and the converter should be activated. The + * Converter starts to collect the data when the fetch is started, and prints out the + * converted data when the fetch is finished. Converter will collect data only if both + * fetchStarted and foundQuery are true. + */ + void fetchStarted() { + if (isActiveFetchConverter) { + ((FetchConverter) out).fetchStarted(); + } + } + + /** + * Indicates that the following data will be a query result, and the converter should be + * activated. Converter will collect the data only if both fetchStarted and foundQuery are true. 
+ * @param foundQuery The following data will be a query result (true) or not (false) + */ + void foundQuery(boolean foundQuery) { + if (isActiveFetchConverter) { + ((FetchConverter) out).foundQuery(foundQuery); + } + } + + /** + * Indicates that the previously collected data should be converted and written. Converter + * starts to collect the data when the fetch is started, and prints out the converted data when + * the fetch is finished. + */ + void fetchFinished() { + if (isActiveFetchConverter) { + ((FetchConverter) out).fetchFinished(); + } } @Override @@ -56,4 +126,14 @@ public void print(String command) { public void close() throws IOException { out.close(); } + + /** + * The supported types of converters. Each of them points to a specific FetchConverter class. + */ + public enum SupportedConverter { + SORT_QUERY_RESULTS, + HASH_QUERY_RESULTS, + SORT_AND_HASH_QUERY_RESULTS, + NONE + } } diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java index 8c7057c..f95c3ca 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter; +import org.apache.hive.beeline.OutputFile.SupportedConverter; import org.apache.hive.beeline.QFile; import org.apache.hive.beeline.QFile.QFileBuilder; import org.apache.hive.beeline.QFileBeeLineClient; @@ -118,7 +119,8 @@ protected void runInfraScript(File script, File beeLineOutput, File log) "set test.script.dir=" + testScriptDirectory + ";", "!run " + script, }, - log); + log, + SupportedConverter.NONE); } catch (Exception e) { throw new SQLException("Error running infra script: " + script + "\nCheck the following logs for 
details:\n - " + beeLineOutput + "\n - " + log, e); diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java itests/util/src/main/java/org/apache/hive/beeline/QFile.java similarity index 94% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFile.java index 9fae194..cb4b6e4 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFile.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.StreamPrinter; +import org.apache.hive.beeline.OutputFile.SupportedConverter; import java.io.ByteArrayOutputStream; import java.io.File; @@ -69,6 +70,7 @@ private File afterExecuteLogFile; private static RegexFilterSet filterSet = getFilterSet(); private boolean rewriteSourceTables; + private SupportedConverter converter; private QFile() {} @@ -104,6 +106,10 @@ public File getAfterExecuteLogFile() { return afterExecuteLogFile; } + public SupportedConverter getConverter() { + return converter; + } + public String getDebugHint() { return String.format(DEBUG_HINT, inputFile, rawOutputFile, outputFile, expectedOutputFile, logFile, beforeExecuteLogFile, afterExecuteLogFile, @@ -330,6 +336,17 @@ public QFile getQFile(String name) throws IOException { result.beforeExecuteLogFile = new File(logDirectory, name + ".q.beforeExecute.log"); result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log"); result.rewriteSourceTables = rewriteSourceTables; + result.converter = SupportedConverter.NONE; + String input = FileUtils.readFileToString(result.inputFile, "UTF-8"); + if (input.contains("-- SORT_QUERY_RESULTS")) { + result.converter = SupportedConverter.SORT_QUERY_RESULTS; + } + if (input.contains("-- HASH_QUERY_RESULTS")) { + result.converter = SupportedConverter.HASH_QUERY_RESULTS; + } + if 
(input.contains("-- SORT_AND_HASH_QUERY_RESULTS")) { + result.converter = SupportedConverter.SORT_AND_HASH_QUERY_RESULTS; + } return result; } } diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java similarity index 87% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java rename to itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java index 7244bf8..a246d94 100644 --- itests/util/src/main/java/org/apache/hive/beeline/qfile/QFileBeeLineClient.java +++ itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java @@ -23,6 +23,8 @@ import java.io.PrintStream; import java.sql.SQLException; +import org.apache.hive.beeline.OutputFile.SupportedConverter; + /** * QFile test client using BeeLine. It can be used to submit a list of command strings, or a QFile. */ @@ -50,12 +52,14 @@ protected QFileBeeLineClient(String jdbcUrl, String jdbcDriver, String username, }); } - public void execute(String[] commands, File resultFile) throws SQLException { + public void execute(String[] commands, File resultFile, SupportedConverter converter) + throws Exception { beeLine.runCommands( new String[] { "!set outputformat csv", "!record " + resultFile.getAbsolutePath() }); + beeLine.getRecordOutputFile().setConverter(converter); int lastSuccessfulCommand = beeLine.runCommands(commands); if (commands.length != lastSuccessfulCommand) { @@ -65,7 +69,7 @@ public void execute(String[] commands, File resultFile) throws SQLException { beeLine.runCommands(new String[] {"!record"}); } - private void beforeExecute(QFile qFile) throws SQLException { + private void beforeExecute(QFile qFile) throws Exception { execute( new String[] { "USE default;", @@ -74,22 +78,24 @@ private void beforeExecute(QFile qFile) throws SQLException { "CREATE DATABASE `" + qFile.getName() + "`;", "USE `" + qFile.getName() + "`;" }, - 
qFile.getBeforeExecuteLogFile()); + qFile.getBeforeExecuteLogFile(), + SupportedConverter.NONE); } - private void afterExecute(QFile qFile) throws SQLException { + private void afterExecute(QFile qFile) throws Exception { execute( new String[] { "USE default;", "DROP DATABASE IF EXISTS `" + qFile.getName() + "` CASCADE;", }, - qFile.getAfterExecuteLogFile()); + qFile.getAfterExecuteLogFile(), + SupportedConverter.NONE); } - public void execute(QFile qFile) throws SQLException, IOException { + public void execute(QFile qFile) throws Exception { beforeExecute(qFile); String[] commands = beeLine.getCommands(qFile.getInputFile()); - execute(qFile.filterCommands(commands), qFile.getRawOutputFile()); + execute(qFile.filterCommands(commands), qFile.getRawOutputFile(), qFile.getConverter()); afterExecute(qFile); } diff --git itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java itests/util/src/main/java/org/apache/hive/beeline/package-info.java similarity index 100% rename from itests/util/src/main/java/org/apache/hive/beeline/qfile/package-info.java rename to itests/util/src/main/java/org/apache/hive/beeline/package-info.java diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out index 70a37ca..48d46ee 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out @@ -328,12 +328,12 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','NULL','NULL' '3','val_3','NULL','NULL' '4','val_4','NULL','NULL' '5','val_5','NULL','NULL' -'10','val_10','NULL','NULL' +'a.key','a.value','b.key','b.value' 5 rows selected >>> >>> explain @@ -428,11 +428,11 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' 'NULL','NULL','25','val_25' 'NULL','NULL','30','val_30' +'a.key','a.value','b.key','b.value' 4 rows selected >>> >>> explain @@ -527,16 +527,16 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','NULL','NULL' '3','val_3','NULL','NULL' '4','val_4','NULL','NULL' '5','val_5','NULL','NULL' -'10','val_10','NULL','NULL' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' 'NULL','NULL','25','val_25' 'NULL','NULL','30','val_30' +'a.key','a.value','b.key','b.value' 9 rows selected >>> >>> @@ -730,12 +730,12 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','NULL','NULL' '3','val_3','NULL','NULL' '4','val_4','NULL','NULL' '5','val_5','NULL','NULL' -'10','val_10','NULL','NULL' +'a.key','a.value','b.key','b.value' 5 rows selected >>> >>> explain @@ -830,11 +830,11 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' 'NULL','NULL','25','val_25' 'NULL','NULL','30','val_30' +'a.key','a.value','b.key','b.value' 4 rows selected >>> >>> explain @@ -929,16 +929,16 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','NULL','NULL' '3','val_3','NULL','NULL' '4','val_4','NULL','NULL' '5','val_5','NULL','NULL' -'10','val_10','NULL','NULL' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' 'NULL','NULL','25','val_25' 'NULL','NULL','30','val_30' +'a.key','a.value','b.key','b.value' 9 rows selected >>> >>> diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out index 22a2d6a..f7023a1 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out @@ -232,9 +232,9 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'4','val_4','4','val_4' '10','val_10','10','val_10' +'4','val_4','4','val_4' +'a.key','a.value','b.key','b.value' 2 rows selected >>> >>> explain @@ -329,12 +329,12 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','10','val_10' '3','val_3','NULL','NULL' '4','val_4','4','val_4' '5','val_5','NULL','NULL' -'10','val_10','10','val_10' +'a.key','a.value','b.key','b.value' 5 rows selected >>> >>> explain @@ -429,13 +429,13 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'4','val_4','4','val_4' '10','val_10','10','val_10' +'4','val_4','4','val_4' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' +'a.key','a.value','b.key','b.value' 6 rows selected >>> >>> explain @@ -530,16 +530,16 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','10','val_10' '3','val_3','NULL','NULL' '4','val_4','4','val_4' '5','val_5','NULL','NULL' -'10','val_10','10','val_10' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' +'a.key','a.value','b.key','b.value' 9 rows selected >>> >>> @@ -638,9 +638,9 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'4','val_4','4','val_4' '10','val_10','10','val_10' +'4','val_4','4','val_4' +'a.key','a.value','b.key','b.value' 2 rows selected >>> >>> explain @@ -735,12 +735,12 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a left outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','10','val_10' '3','val_3','NULL','NULL' '4','val_4','4','val_4' '5','val_5','NULL','NULL' -'10','val_10','10','val_10' +'a.key','a.value','b.key','b.value' 5 rows selected >>> >>> explain @@ -835,13 +835,13 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a right outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'4','val_4','4','val_4' '10','val_10','10','val_10' +'4','val_4','4','val_4' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' +'a.key','a.value','b.key','b.value' 6 rows selected >>> >>> explain @@ -936,16 +936,16 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_1 a full outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '1','val_1','NULL','NULL' +'10','val_10','10','val_10' '3','val_3','NULL','NULL' '4','val_4','4','val_4' '5','val_5','NULL','NULL' -'10','val_10','10','val_10' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' 'NULL','NULL','20','val_20' 'NULL','NULL','23','val_23' +'a.key','a.value','b.key','b.value' 9 rows selected >>> >>> diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out index 6c9b8e4..2567ea6 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out @@ -231,9 +231,9 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '20','val_20','20','val_20' '23','val_23','23','val_23' +'a.key','a.value','b.key','b.value' 2 rows selected >>> >>> explain @@ -328,11 +328,11 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '20','val_20','20','val_20' '23','val_23','23','val_23' '25','val_25','NULL','NULL' '30','val_30','NULL','NULL' +'a.key','a.value','b.key','b.value' 4 rows selected >>> >>> explain @@ -427,13 +427,13 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'NULL','NULL','4','val_4' +'20','val_20','20','val_20' +'23','val_23','23','val_23' 'NULL','NULL','10','val_10' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' -'20','val_20','20','val_20' -'23','val_23','23','val_23' +'NULL','NULL','4','val_4' +'a.key','a.value','b.key','b.value' 6 rows selected >>> >>> explain @@ -528,15 +528,15 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(a)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'NULL','NULL','4','val_4' -'NULL','NULL','10','val_10' -'NULL','NULL','17','val_17' -'NULL','NULL','19','val_19' '20','val_20','20','val_20' '23','val_23','23','val_23' '25','val_25','NULL','NULL' '30','val_30','NULL','NULL' +'NULL','NULL','10','val_10' +'NULL','NULL','17','val_17' +'NULL','NULL','19','val_19' +'NULL','NULL','4','val_4' +'a.key','a.value','b.key','b.value' 8 rows selected >>> >>> @@ -635,9 +635,9 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_2 a join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '20','val_20','20','val_20' '23','val_23','23','val_23' +'a.key','a.value','b.key','b.value' 2 rows selected >>> >>> explain @@ -732,11 +732,11 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! 
seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_2 a left outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' '20','val_20','20','val_20' '23','val_23','23','val_23' '25','val_25','NULL','NULL' '30','val_30','NULL','NULL' +'a.key','a.value','b.key','b.value' 4 rows selected >>> >>> explain @@ -831,13 +831,13 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_2 a right outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'NULL','NULL','4','val_4' +'20','val_20','20','val_20' +'23','val_23','23','val_23' 'NULL','NULL','10','val_10' 'NULL','NULL','17','val_17' 'NULL','NULL','19','val_19' -'20','val_20','20','val_20' -'23','val_23','23','val_23' +'NULL','NULL','4','val_4' +'a.key','a.value','b.key','b.value' 6 rows selected >>> >>> explain @@ -932,15 +932,15 @@ INFO : Total MapReduce CPU Time Spent: 0 msec INFO : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds INFO : OK DEBUG : Shutting down query select /*+mapjoin(b)*/ * from smb_bucket_2 a full outer join smb_bucket_3 b on a.key = b.key -'a.key','a.value','b.key','b.value' -'NULL','NULL','4','val_4' -'NULL','NULL','10','val_10' -'NULL','NULL','17','val_17' -'NULL','NULL','19','val_19' '20','val_20','20','val_20' '23','val_23','23','val_23' '25','val_25','NULL','NULL' '30','val_30','NULL','NULL' +'NULL','NULL','10','val_10' +'NULL','NULL','17','val_17' +'NULL','NULL','19','val_19' +'NULL','NULL','4','val_4' +'a.key','a.value','b.key','b.value' 8 rows selected >>> >>>