diff --git data/files/Calcs.tbl data/files/Calcs.tbl
new file mode 100755
index 0000000..668282f
--- /dev/null
+++ data/files/Calcs.tbl
@@ -0,0 +1,17 @@
+key09\N12.411.5-10.56\NTECHNOLOGYBUSINESS COPIERStene83-92\Ntruefalse\N\N2004-04-101998-08-12\N1900-01-01 20:36:00\N2004-07-23 21:13:37\Nj
+key10\N10.326.8-4.7919.39TECHNOLOGYCD-R MEDIAelevene4\N-311truetruefalse\N\N2004-04-111974-03-171999-08-201900-01-01 01:31:3200:05:572004-07-14 08:16:44k
+key11\N2.473.79-10.813.82TECHNOLOGYCONFERENCE PHONEStwelve\N10-8-42falsetruetrue\N\N2004-04-121994-04-20\N1899-12-30 22:15:4004:40:492004-07-25 15:22:26\Nl
+key12\N12.05\N-6.623.38TECHNOLOGYCORDED KEYBOARDS\N\N\N\N011\Nfalsetruetrue\N2004-04-132001-02-04\N1900-01-01 13:53:4604:48:072004-07-17 14:01:56m
+key13\N10.3713.04-18.43\NTECHNOLOGYCORDLESS KEYBOARDSfourteen\N4\N418\Nfalsetruetrue\N2004-04-141988-01-051996-05-131900-01-01 04:57:51\N2004-07-19 22:21:31n
+key14\N7.1\N6.84-14.21TECHNOLOGYDOT MATRIX PRINTERSfifteene11\N-818truefalsetrue\N\N2004-04-151972-07-121986-11-081899-12-30 22:42:4318:58:412004-07-31 11:57:52\No
+key15\N16.8110.98-10.986.75TECHNOLOGYDVDsixteene4\N-911false\Nfalsetrue\N2004-04-161995-06-04\N1899-12-30 22:24:08\N2004-07-14 07:43:00p
+key16\N7.127.87-2.6\NTECHNOLOGYERICSSON\N\N8-960\N\Nfalse\N\N2004-04-172002-04-271992-01-181900-01-01 11:58:2912:33:572004-07-28 12:34:28q
+key0012.38.4217.86-11.52\NFURNITURECLAMP ON LAMPSonee1-358truetruefalsetrue2004-04-152004-04-011977-04-201986-03-201899-12-30 21:07:3219:36:222004-07-09 10:17:35\Na
+key01-12.36.7116.73-9.3110.85FURNITURECLOCKStwoe\N-6-413falsetruefalse\N1972-07-042004-04-021995-09-03\N1900-01-01 13:48:4802:05:252004-07-26 12:30:34\Nb
+key0215.79.78\N-12.17-13.47OFFICE SUPPLIESAIR PURIFIERSthreee\N\N52\Ntruefalse\N1975-11-122004-04-031997-09-191997-02-021900-01-01 18:21:0809:33:312004-08-02 07:59:23c
+key03-15.77.438.51-7.25-6.05OFFICE SUPPLIESBINDER ACCESSORIES\Ne\N-4-55truefalsefalse\N2004-06-042004-04-041980-07-26\N1900-01-01 18:51:4822:50:162004-07-05 13:14:20d
+key043.59.056.4612.938.32OFFICE SUPPLIESBINDER CLIPSfive\N7\N39falsefalsetruetrue2004-06-192004-04-051997-05-301996-03-071900-01-01 15:01:19\N2004-07-28 23:30:22e
+key05-3.59.388.98-19.9610.71OFFICE SUPPLIESBINDING MACHINESsix\N3\N27\Nfalsetruefalse\N2004-04-061980-11-071979-04-011900-01-01 08:59:3919:57:332004-07-22 00:30:23\Nf
+key060.016.4211.6910.93\NOFFICE SUPPLIESBINDING SUPPLIES\Ne8\N918true\Nfalse\N\N2004-04-071977-02-08\N1900-01-01 07:37:48\N2004-07-28 06:54:50g
+key07\N11.3817.253.64-10.24OFFICE SUPPLIESBUSINESS ENVELOPESeighte\N203false\Ntruefalse\N2004-04-081974-05-03\N1900-01-01 19:45:5419:48:232004-07-12 17:30:16h
+key0810.09.47\N-13.384.77TECHNOLOGYANSWERING MACHINESnine\N\N3-617\N\Nfalsefalse\N2004-04-091976-09-091983-05-221900-01-01 09:00:5922:20:142004-07-04 22:49:28i
diff --git data/scripts/q_test_cleanup_druid.sql data/scripts/q_test_cleanup_druid.sql
index b0d3425..67a4d86 100644
--- data/scripts/q_test_cleanup_druid.sql
+++ data/scripts/q_test_cleanup_druid.sql
@@ -1,2 +1,4 @@
DROP TABLE IF EXISTS alltypesorc;
DROP TABLE IF EXISTS druid_table;
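+-- Tables created by q_test_druid_init.sql for the Druid/Tableau tests: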
+DROP TABLE IF EXISTS calcs_orc;
+DROP TABLE IF EXISTS calcs;
diff --git data/scripts/q_test_druid_init.sql data/scripts/q_test_druid_init.sql
index ee025f1..4c8be0e 100644
--- data/scripts/q_test_druid_init.sql
+++ data/scripts/q_test_druid_init.sql
@@ -25,5 +25,75 @@ ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
--- Druid Table
+--
+-- TABLEAU TABLE
+--
+
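+-- Load the raw Calcs.tbl fixture into a TEXTFILE staging table, then copy it
+-- into an ORC table (calcs_orc) that the q-tests read from. Dates, times and
+-- datetimes are kept as STRING here; the .q files cast them where needed.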
+DROP TABLE IF EXISTS Calcs_txt;
+CREATE TABLE Calcs_txt (
+ key STRING,
+ num0 DOUBLE,
+ num1 DOUBLE,
+ num2 DOUBLE,
+ num3 DOUBLE,
+ num4 DOUBLE,
+ str0 STRING,
+ str1 STRING,
+ str2 STRING,
+ str3 STRING,
+ int0 INT,
+ int1 INT,
+ int2 INT,
+ int3 INT,
+ bool0 BOOLEAN,
+ bool1 BOOLEAN,
+ bool2 BOOLEAN,
+ bool3 BOOLEAN,
+ date0 STRING,
+ date1 STRING,
+ date2 STRING,
+ date3 STRING,
+ time0 STRING,
+ time1 STRING,
+ datetime0 STRING,
+ datetime1 STRING,
+ zzz STRING
+) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/Calcs.tbl" OVERWRITE INTO TABLE Calcs_txt;
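+-- ${hiveconf:test.data.dir} is substituted by the test driver. Since no ROW
+-- FORMAT is declared above, Calcs.tbl must use Hive's default field delimiter
+-- (\001 / Ctrl-A) and \N for NULLs.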
+
+DROP TABLE IF EXISTS calcs_orc;
+CREATE TABLE calcs_orc (
+ key STRING,
+ num0 DOUBLE,
+ num1 DOUBLE,
+ num2 DOUBLE,
+ num3 DOUBLE,
+ num4 DOUBLE,
+ str0 STRING,
+ str1 STRING,
+ str2 STRING,
+ str3 STRING,
+ int0 INT,
+ int1 INT,
+ int2 INT,
+ int3 INT,
+ bool0 BOOLEAN,
+ bool1 BOOLEAN,
+ bool2 BOOLEAN,
+ bool3 BOOLEAN,
+ date0 STRING,
+ date1 STRING,
+ date2 STRING,
+ date3 STRING,
+ time0 STRING,
+ time1 STRING,
+ datetime0 STRING,
+ datetime1 STRING,
+ zzz STRING
+)
+STORED AS ORC TBLPROPERTIES ("orc.compress"="SNAPPY");
+INSERT OVERWRITE TABLE calcs_orc SELECT * FROM Calcs_txt;
+DROP TABLE IF EXISTS Calcs_txt;
+ANALYZE TABLE calcs_orc COMPUTE STATISTICS;
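+-- calcs_orc is the long-lived source table (the pom.xml change in this patch
+-- adds calcs_orc/calcs_txt to the pre-created source-table list); the text
+-- staging table is dropped once the ORC copy exists.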
diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties
index 4a52eb5..a9c8efe 100644
--- itests/src/test/resources/testconfiguration.properties
+++ itests/src/test/resources/testconfiguration.properties
@@ -1657,5 +1657,7 @@ druid.query.files=druidmini_test1.q,\
druidmini_test_insert.q,\
druidmini_mv.q,\
druid_timestamptz.q, \
- druidmini_dynamic_partition.q
+ druidmini_dynamic_partition.q, \
+ druidmini_tableau_query.q, \
+ druidmini_tableau_explain.q
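+# druidmini_tableau_explain.q (added below) only EXPLAINs the Tableau-generated
+# statements; druidmini_tableau_query.q presumably executes the same workload.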
diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
index d38810f..0b12928 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCliConfig.java
@@ -43,9 +43,9 @@
public static final String HIVE_ROOT = HiveTestEnvSetup.HIVE_ROOT;
- public static enum MetastoreType {
+ enum MetastoreType {
sql
- };
+ }
private MetastoreType metastoreType = MetastoreType.sql;
private String queryFile;
diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 566d02b..7034c38 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -177,7 +177,7 @@ public MiniDruidCliConfig() {
setInitScript("q_test_druid_init.sql");
setCleanupScript("q_test_cleanup_druid.sql");
- setHiveConfDir("");
+ setHiveConfDir("data/conf/llap");
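+    // Druid q-tests now reuse the LLAP configuration: QTestUtil (below) maps
+    // MiniClusterType.druid onto the Tez core cluster type.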
setClusterType(MiniClusterType.druid);
setMetastoreType(MetastoreType.sql);
setFsType(QTestUtil.FsType.hdfs);
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 6cd7a13..45602a2 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -456,8 +456,7 @@ private void createRemoteDirs() {
private enum CoreClusterType {
MR,
TEZ,
- SPARK,
- DRUID
+ SPARK
}
public enum FsType {
@@ -476,7 +475,7 @@ private void createRemoteDirs() {
llap(CoreClusterType.TEZ, FsType.hdfs),
llap_local(CoreClusterType.TEZ, FsType.local),
none(CoreClusterType.MR, FsType.local),
- druid(CoreClusterType.DRUID, FsType.hdfs);
+ druid(CoreClusterType.TEZ, FsType.hdfs);
private final CoreClusterType coreClusterType;
@@ -648,6 +647,27 @@ private void setupMiniCluster(HadoopShims shims, String confDir) throws
String uriString = fs.getUri().toString();
+ if (clusterType == MiniClusterType.druid) {
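+      // Configure Druid's deep storage, Derby metadata store and staging
+      // directory in HiveConf *before* init()/start(), and do it ahead of the
+      // Tez/LLAP setup below, since MiniClusterType.druid now runs on TEZ.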
+ final String tempDir = System.getProperty("test.tmp.dir");
+ druidCluster = new MiniDruidCluster("mini-druid",
+ getLogDirectory(),
+ tempDir,
+ setup.zkPort,
+ Utilities.jarFinderGetJar(MiniDruidCluster.class)
+ );
+ final Path druidDeepStorage = fs.makeQualified(new Path(druidCluster.getDeepStorageDir()));
+ fs.mkdirs(druidDeepStorage);
+ conf.set("hive.druid.storage.storageDirectory", druidDeepStorage.toUri().getPath());
+ conf.set("hive.druid.metadata.db.type", "derby");
+ conf.set("hive.druid.metadata.uri", druidCluster.getMetadataURI());
+ final Path scratchDir = fs
+ .makeQualified(new Path(System.getProperty("test.tmp.dir"), "druidStagingDir"));
+ fs.mkdirs(scratchDir);
+ conf.set("hive.druid.working.directory", scratchDir.toUri().getPath());
+ druidCluster.init(conf);
+ druidCluster.start();
+ }
+
if (clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
if (confDir != null && !confDir.isEmpty()) {
conf.addResource(new URL("file://" + new File(confDir).toURI().getPath()
@@ -668,18 +688,6 @@ private void setupMiniCluster(HadoopShims shims, String confDir) throws
mr = shims.getMiniSparkCluster(conf, 2, uriString, 1);
} else if (clusterType == MiniClusterType.mr) {
mr = shims.getMiniMrCluster(conf, 2, uriString, 1);
- } else if (clusterType == MiniClusterType.druid) {
- final String tempDir = System.getProperty("test.tmp.dir");
- druidCluster = new MiniDruidCluster("mini-druid",
- getLogDirectory(),
- tempDir,
- setup.zkPort,
- Utilities.jarFinderGetJar(MiniDruidCluster.class)
- );
- druidCluster.init(conf);
- final Path druidDeepStorage = fs.makeQualified(new Path(druidCluster.getDeepStorageDir()));
- fs.mkdirs(druidDeepStorage);
- druidCluster.start();
}
}
diff --git pom.xml pom.xml
index a242fbf..7222e4d 100644
--- pom.xml
+++ pom.xml
@@ -1285,7 +1285,7 @@
${test.warehouse.scheme}${test.warehouse.dir}
true
- src,src1,srcbucket,srcbucket2,src_json,src_thrift,src_sequencefile,srcpart,alltypesorc,alltypesparquet,src_hbase,cbo_t1,cbo_t2,cbo_t3,src_cbo,part,lineitem
+ src,src1,srcbucket,srcbucket2,src_json,src_thrift,src_sequencefile,srcpart,alltypesorc,alltypesparquet,src_hbase,cbo_t1,cbo_t2,cbo_t3,src_cbo,part,lineitem,calcs_orc,calcs_txt
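+      <!-- calcs_orc and calcs_txt join src, srcpart, alltypesorc, etc. as
+           pre-created source tables the q-test framework leaves in place. -->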
${test.conf.dir}/krb5.conf
${antlr.version}
diff --git ql/src/test/queries/clientpositive/druidmini_tableau_explain.q ql/src/test/queries/clientpositive/druidmini_tableau_explain.q
new file mode 100644
index 0000000..999b927
--- /dev/null
+++ ql/src/test/queries/clientpositive/druidmini_tableau_explain.q
@@ -0,0 +1,433 @@
+create database druid_tableau;
+use druid_tableau;
+drop table if exists calcs;
+create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc;
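+-- The CTAS above builds a Druid-backed table: the storage handler expects a
+-- `__time` column (here TIMESTAMP WITH LOCAL TIME ZONE); "MONTH" segment
+-- granularity controls how segments are partitioned, and "DAY" query
+-- granularity truncates __time values to day precision. Booleans are cast to
+-- STRING so they land as Druid string dimensions.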
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0);
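+-- Tableau often ships stddev/variance as the raw moments SUM(x), COUNT(x) and
+-- SUM(x*x) and recombines them client-side, e.g.
+-- STDDEV_POP(x) = SQRT(SUM(x*x)/n - (SUM(x)/n)^2); the temp_z_stdevp_* aliases
+-- above appear to carry exactly those moments.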
+
+EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs ;
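+-- The expression above truncates a timestamp to the start of its week:
+-- '1995-01-01' (a Sunday) anchors the calculation,
+-- 1 + PMOD(DATEDIFF(d, '1995-01-01'), 7) yields the 1-based weekday
+-- (Sunday = 1), and DATE_ADD steps back (weekday - 1) days before
+-- re-attaching the time-of-day offset.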
+
+EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2 ;
+
+EXPLAIN SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+-- @TODO Disabled: FLOOR() returns BIGINT, and comparing it against the DOUBLE
+-- expression (0.1 * num0) appears to trip Hive's strict type-safety check
+-- ("Unsafe compares between different types are disabled for safety reasons").
+
+--EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (CASE WHEN Calcs.num1 < 0 AND FLOOR((0.10000000000000001 * Calcs.num0)) <> (0.10000000000000001 * Calcs.num0) THEN NULL ELSE POW(Calcs.num1,(0.10000000000000001 * Calcs.num0)) END) AS sum_z_power_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT * FROM druid_tableau.calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2 ;
+
+EXPLAIN SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key ;
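+-- Tableau's "top N" filter pattern: the subquery ranks keys by SUM(num2),
+-- keeps the 10 lowest, and the outer query re-aggregates only rows whose key
+-- survived the join (xtableau_join_flag marks the join hit).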
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2 ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) ;
+
+EXPLAIN SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
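+-- POW(2,31)-1 = 2147483647 (INT max); summing ten copies per row gives
+-- 21,474,836,470, which forces the arithmetic out of 32-bit range and
+-- exercises BIGINT handling.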
+
+EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs ;
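+-- Week difference: each date is shifted back to its week start (by subtracting
+-- its PMOD-derived weekday offset from the '1995-01-01' Sunday anchor), and the
+-- day gap between the two week starts is divided by 7.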
+
+EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2 ;
+
+EXPLAIN SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) ;
+
+EXPLAIN SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4) ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') ;
+
+EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) ;
+
+EXPLAIN SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1 ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+EXPLAIN SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1 ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END ;
+
+EXPLAIN SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs ;
diff --git ql/src/test/queries/clientpositive/druidmini_tableau_query.q ql/src/test/queries/clientpositive/druidmini_tableau_query.q
new file mode 100644
index 0000000..4075304
--- /dev/null
+++ ql/src/test/queries/clientpositive/druidmini_tableau_query.q
@@ -0,0 +1,433 @@
+create database druid_tableau;
+use druid_tableau;
+drop table if exists calcs;
+create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc;
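+-- Note: the CTAS above maps datetime0 (cast to TIMESTAMP WITH LOCAL TIME ZONE) to Druid's
+-- `__time` column; bool0..bool3 are cast to STRING, presumably so they can be stored as
+-- Druid string dimensions. All other columns are copied through from default.calcs_orc.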
+
+SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0);
+
+SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2 ;
+
+SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key ;
+
+SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+--@TODO: the query below is disabled because it hits "Unsafe compares between different types are disabled for safety reasons"
+
+--SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (CASE WHEN Calcs.num1 < 0 AND FLOOR((0.10000000000000001 * Calcs.num0)) <> (0.10000000000000001 * Calcs.num0) THEN NULL ELSE POW(Calcs.num1,(0.10000000000000001 * Calcs.num0)) END) AS sum_z_power_num_num_ok FROM druid_tableau.calcs Calcs ;
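+-- (Presumably FLOOR() returns BIGINT here while (0.10000000000000001 * Calcs.num0) is DOUBLE,
+-- so the <> check is a BIGINT/DOUBLE comparison, which hive.strict.checks.type.safety rejects.)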
+
+SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT * FROM druid_tableau.calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2 ;
+
+SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, 
+FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') ;
+
+SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2 ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) ;
+
+SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs ;
+
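+-- Tableau's expansion of DATETRUNC('second', datetime0): the timestamp is first truncated to the day, then the hour, minute, and second components are added back in turn through nested UNIX_TIMESTAMP/FROM_UNIXTIME round-trips (the simpler hour and minute variants appear further below).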
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs ;
+
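+-- Week-level DATEDIFF emulation: both dates are snapped to their week start relative to the '1995-01-01' anchor (a Sunday) before the day difference is divided by 7.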
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs ;
+
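+-- Month-level DATEADD emulation: the target year is YEAR + FLOOR((MONTH + int1) / 12), the target month is PMOD(MONTH + int1, 12) zero-padded with LPAD, and the day/time tail of the original timestamp is re-attached via SUBSTR.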
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2 ;
+
+SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) ;
+
+SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4) ;
+
+SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs ;
+
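+-- Compares the trailing LENGTH('ES') characters of RTRIM(str1) with 'ES'; despite the startswith alias this effectively tests whether str1 ends with 'ES'.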
+SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs ;
+
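+-- Second-level DATEDIFF emulation: the day difference is scaled to hours, then minutes, then seconds, with HOUR/MINUTE/SECOND of the date-only literal coalescing to 0.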
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') ;
+
+SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs ;
+
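+-- Day-of-week arithmetic against the '1995-01-01' (Sunday) anchor: dates whose index resolves to 1 (Sunday) or 7 (Saturday) are replaced with NULL.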
+SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) ;
+
+SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) ;
+
+SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs ;
+
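+-- Tableau top-N filter pattern: the inner subquery keeps the 10 keys with the largest SUM(num2), and the outer aggregate is restricted to those keys through the self-join on key.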
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs ;
+
+SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1 ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') ;
+
+SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) ;
+
+SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1 ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2 ;
+
+SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ;
+
+SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1 ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END ;
+
+SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0) ;
+
+SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs ;
diff --git ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
index 0ffaaf4..f2b5e8d 100644
--- ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
+++ ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out
@@ -27,54 +27,54 @@ POSTHOOK: Output: default@tstz1
PREHOOK: query: select `__time` from tstz1
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select `__time` from tstz1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 12:26:34.0 US/Pacific
PREHOOK: query: select cast(`__time` as timestamp) from tstz1
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select cast(`__time` as timestamp) from tstz1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 12:26:34
PREHOOK: query: select cast(`__time` as timestamp) from tstz1 where `__time` >= cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with local time zone)
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select cast(`__time` as timestamp) from tstz1 where `__time` >= cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with local time zone)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 12:26:34
PREHOOK: query: select `__time` from tstz1
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select `__time` from tstz1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 20:26:34.0 UTC
PREHOOK: query: select cast(`__time` as timestamp) from tstz1
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select cast(`__time` as timestamp) from tstz1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 20:26:34
PREHOOK: query: select cast(`__time` as timestamp) from tstz1 where `__time` >= cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with local time zone)
PREHOOK: type: QUERY
PREHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select cast(`__time` as timestamp) from tstz1 where `__time` >= cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with local time zone)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tstz1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2016-01-03 20:26:34
diff --git ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
index 941b760..f82ecff 100644
--- ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out
@@ -101,56 +101,61 @@ POSTHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table
POSTHOOK: type: CREATETABLE_AS_SELECT
STAGE DEPENDENCIES:
Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1
+ Stage-4 depends on stages: Stage-2, Stage-0
+ Stage-3 depends on stages: Stage-4
Stage-0 depends on stages: Stage-1
- Stage-3 depends on stages: Stage-0
- Stage-2 depends on stages: Stage-3
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alltypesorc
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ctimestamp1 is not null (type: boolean)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: alltypesorc
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: ctimestamp1 is not null (type: boolean)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: CAST( ctimestamp1 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ sort order: ++
+ Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
+ Reducer 2
+ Reduce Operator Tree:
Select Operator
- expressions: CAST( ctimestamp1 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- sort order: ++
- Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
- Reduce Operator Tree:
- Select Operator
- expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Dp Sort State: PARTITION_SORTED
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
- output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
- serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
- name: default.druid_partitioned_table
+ expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Dp Sort State: PARTITION_SORTED
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
+ output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
+ serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
+ name: default.druid_partitioned_table
- Stage: Stage-0
- Move Operator
- files:
- hdfs directory: true
-#### A masked pattern was here ####
+ Stage: Stage-2
+ Dependency Collection
- Stage: Stage-3
+ Stage: Stage-4
Create Table Operator:
Create Table
columns: __time timestamp with local time zone, cstring1 string, cstring2 string, cdouble double, cfloat float, ctinyint tinyint, csmallint smallint, cint int, cbigint bigint, cboolean1 boolean, cboolean2 boolean
@@ -161,10 +166,16 @@ STAGE PLANS:
druid.segment.granularity HOUR
druid.segment.targetShardsPerGranularity 6
- Stage: Stage-2
+ Stage: Stage-3
Stats Work
Basic Stats Work:
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://### HDFS PATH ###
+
PREHOOK: query: CREATE TABLE druid_partitioned_table
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
TBLPROPERTIES (
@@ -227,11 +238,11 @@ POSTHOOK: Lineage: druid_partitioned_table.ctinyint SIMPLE [(alltypesorc)alltype
PREHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
-39590.246
PREHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`,
cstring1,
@@ -247,7 +258,7 @@ PREHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local tim
FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@alltypesorc
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local time zone)) as `__time`,
cstring1,
cstring2,
@@ -262,7 +273,7 @@ POSTHOOK: query: SELECT floor_hour(cast(`ctimestamp1` as timestamp with local ti
FROM alltypesorc where ctimestamp1 IS NOT NULL order by `__time`, cstring2 DESC NULLS LAST, cstring1 DESC NULLS LAST LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:00:00.0 US/Pacific NULL yx36UAT823Cm -200.0 52.0 52 -200 NULL 2029007949 NULL true
1969-12-31 15:00:00.0 US/Pacific NULL yvcx4HYTT8tvAm6CNbXHaH -7196.0 40.0 40 -7196 NULL 437984126 NULL false
1969-12-31 15:00:00.0 US/Pacific NULL ysho54gMb 15601.0 -22.0 -22 15601 NULL 1553802956 NULL false
@@ -325,42 +336,49 @@ STAGE PLANS:
Pre-Insert task
Stage: Stage-3
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alltypesorc
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ctimestamp2 is not null (type: boolean)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: alltypesorc
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: ctimestamp2 is not null (type: boolean)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: CAST( ctimestamp2 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ sort order: ++
+ Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
+ Reducer 2
+ Reduce Operator Tree:
Select Operator
- expressions: CAST( ctimestamp2 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- sort order: ++
- Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
- Reduce Operator Tree:
- Select Operator
- expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Dp Sort State: PARTITION_SORTED
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
- output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
- serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
- name: default.druid_partitioned_table
+ expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Dp Sort State: PARTITION_SORTED
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
+ output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
+ serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
+ name: default.druid_partitioned_table
PREHOOK: query: INSERT INTO TABLE druid_partitioned_table
SELECT cast (`ctimestamp2` as timestamp with local time zone) as `__time`,
@@ -397,11 +415,11 @@ POSTHOOK: Output: default@druid_partitioned_table
PREHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
-46301.883
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE druid_partitioned_table
SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
@@ -455,42 +473,49 @@ STAGE PLANS:
Pre-Insert task
Stage: Stage-3
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: alltypesorc
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ctimestamp1 is not null (type: boolean)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: alltypesorc
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: ctimestamp1 is not null (type: boolean)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: CAST( ctimestamp1 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ sort order: ++
+ Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
+ Reducer 2
+ Reduce Operator Tree:
Select Operator
- expressions: CAST( ctimestamp1 AS timestamp with local time zone) (type: timestamp with local time zone), cstring1 (type: string), cstring2 (type: string), cdouble (type: double), cfloat (type: float), ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cboolean1 (type: boolean), cboolean2 (type: boolean)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp), (floor((1.0 / rand())) % 6) (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- sort order: ++
- Map-reduce partition columns: __time_granularity (type: timestamp), __druid_extra_partition_key (type: bigint)
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: timestamp with local time zone), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: float), _col5 (type: tinyint), _col6 (type: smallint), _col7 (type: int), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: boolean)
- Reduce Operator Tree:
- Select Operator
- expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Dp Sort State: PARTITION_SORTED
- Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
- output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
- serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
- name: default.druid_partitioned_table
+ expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: tinyint), VALUE._col6 (type: smallint), VALUE._col7 (type: int), VALUE._col8 (type: bigint), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), KEY.__time_granularity (type: timestamp), KEY.__druid_extra_partition_key (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, __time_granularity, __druid_extra_partition_key
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Dp Sort State: PARTITION_SORTED
+ Statistics: Num rows: 12288 Data size: 2601650 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
+ output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
+ serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
+ name: default.druid_partitioned_table
PREHOOK: query: INSERT OVERWRITE TABLE druid_partitioned_table
SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
@@ -527,11 +552,11 @@ POSTHOOK: Output: default@druid_partitioned_table
PREHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT sum(cfloat) FROM druid_partitioned_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_partitioned_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
-39590.246
PREHOOK: query: CREATE TABLE druid_max_size_partition
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
@@ -593,11 +618,11 @@ POSTHOOK: Lineage: druid_max_size_partition.ctinyint SIMPLE [(alltypesorc)alltyp
PREHOOK: query: SELECT sum(cfloat) FROM druid_max_size_partition
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_max_size_partition
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT sum(cfloat) FROM druid_max_size_partition
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_max_size_partition
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
-39590.246
PREHOOK: query: DROP TABLE druid_partitioned_table_0
PREHOOK: type: DROPTABLE
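The hunks above, and those in druidmini_mv.q.out below, replace Map Reduce stage plans with Tez DAGs (explicit Vertices and Edges), which is also why the cross-product warnings now name MERGEJOIN operators and Reducer vertices instead of MAPRED stages. A minimal sketch of the driver-side switch that produces plans of this shape; the exact settings live in the test driver and .q files, not in these golden outputs:

-- representative switch only (assumption: not the test's literal configuration)
set hive.execution.engine=tez;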
diff --git ql/src/test/results/clientpositive/druid/druidmini_mv.q.out ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
index efd6c59..f75a773 100644
--- ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
@@ -70,11 +70,11 @@ POSTHOOK: Output: default@cmv_mat_view
PREHOOK: query: SELECT a, b, c FROM cmv_mat_view
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_mat_view
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT a, b, c FROM cmv_mat_view
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_mat_view
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
2 bob 3.140000104904175
2 bonnie 172342.203125
PREHOOK: query: SHOW TBLPROPERTIES cmv_mat_view
@@ -115,11 +115,11 @@ POSTHOOK: Output: default@cmv_mat_view2
PREHOOK: query: SELECT a, c FROM cmv_mat_view2
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_mat_view2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT a, c FROM cmv_mat_view2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_mat_view2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
3 978.760009765625
6 25.600000381469727
PREHOOK: query: SHOW TBLPROPERTIES cmv_mat_view2
@@ -146,53 +146,38 @@ FROM cmv_basetable
WHERE a = 3
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
+ Stage-0 is a root stage
STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 10770 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (a = 3) (type: boolean)
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: 3 (type: int), c (type: double)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
Stage: Stage-0
Fetch Operator
limit: -1
Processor Tree:
- ListSink
+ TableScan
+ alias: cmv_basetable
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Select Operator
+ expressions: 3 (type: int), c (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
PREHOOK: query: SELECT a, c
FROM cmv_basetable
WHERE a = 3
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT a, c
FROM cmv_basetable
WHERE a = 3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
3 15.8
3 9.8
3 978.76
-Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: EXPLAIN
SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
@@ -213,56 +198,65 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 10770 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (a = 3) (type: boolean)
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: c (type: double)
- outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 3 Data size: 5385 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: double)
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 10770 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ((3 = a) and (d = 3)) (type: boolean)
- Statistics: Num rows: 1 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: c (type: double)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: double)
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- keys:
- 0
- 1
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 10773 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 3 Data size: 10773 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 3 Data size: 10773 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (XPROD_EDGE), Map 3 (XPROD_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((3 = a) and (d = 3)) (type: boolean)
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -270,7 +264,7 @@ STAGE PLANS:
Processor Tree:
ListSink
-Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -278,7 +272,7 @@ PREHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -286,7 +280,7 @@ POSTHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
3 15.8 3 978.76
3 9.8 3 978.76
3 978.76 3 978.76
@@ -305,7 +299,7 @@ POSTHOOK: Lineage: cmv_basetable.b SCRIPT []
POSTHOOK: Lineage: cmv_basetable.c SCRIPT []
POSTHOOK: Lineage: cmv_basetable.d SCRIPT []
POSTHOOK: Lineage: cmv_basetable.t SCRIPT []
-Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: EXPLAIN
SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
@@ -326,56 +320,65 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 20240 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (a = 3) (type: boolean)
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: c (type: double)
- outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: double)
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 20240 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ((3 = a) and (d = 3)) (type: boolean)
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: c (type: double)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: double)
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- keys:
- 0
- 1
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 20242 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 3 Data size: 20242 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 3 Data size: 20242 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (XPROD_EDGE), Map 3 (XPROD_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((3 = a) and (d = 3)) (type: boolean)
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -383,7 +386,7 @@ STAGE PLANS:
Processor Tree:
ListSink
-Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -391,7 +394,7 @@ PREHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -399,7 +402,7 @@ POSTHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_basetable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
3 15.8 3 978.76
3 15.8 3 978.76
3 9.8 3 978.76
@@ -413,7 +416,7 @@ POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-0 is a root stage
Stage-2
- Stage-4 depends on stages: Stage-2, Stage-1, Stage-3
+ Stage-5 depends on stages: Stage-2, Stage-1, Stage-3
Stage-1 is a root stage
Stage-3 is a root stage
@@ -430,7 +433,7 @@ STAGE PLANS:
Insert operator:
Insert
- Stage: Stage-4
+ Stage: Stage-5
Materialized View Work
Stage: Stage-1
@@ -438,42 +441,49 @@ STAGE PLANS:
Pre-Insert task
Stage: Stage-3
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 20240 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (a = 3) (type: boolean)
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: CAST( t AS timestamp with local time zone) (type: timestamp with local time zone), 3 (type: int), b (type: varchar(256)), c (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp)
+ outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: __time_granularity (type: timestamp)
+ sort order: +
+ Map-reduce partition columns: __time_granularity (type: timestamp)
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
Select Operator
- expressions: CAST( t AS timestamp with local time zone) (type: timestamp with local time zone), 3 (type: int), b (type: varchar(256)), c (type: double)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp)
- outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: __time_granularity (type: timestamp)
- sort order: +
- Map-reduce partition columns: __time_granularity (type: timestamp)
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double)
- Reduce Operator Tree:
- Select Operator
- expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: int), VALUE._col2 (type: varchar(256)), VALUE._col3 (type: double), KEY.__time_granularity (type: timestamp)
- outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Dp Sort State: PARTITION_SORTED
- Statistics: Num rows: 3 Data size: 10120 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
- output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
- serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
- name: default.cmv_mat_view2
+ expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: int), VALUE._col2 (type: varchar(256)), VALUE._col3 (type: double), KEY.__time_granularity (type: timestamp)
+ outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Dp Sort State: PARTITION_SORTED
+ Statistics: Num rows: 6 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
+ output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
+ serde: org.apache.hadoop.hive.druid.serde.DruidSerDe
+ name: default.cmv_mat_view2
PREHOOK: query: ALTER MATERIALIZED VIEW cmv_mat_view2 REBUILD
PREHOOK: type: QUERY
@@ -496,7 +506,7 @@ rawDataSize 0
storage_handler org.apache.hadoop.hive.druid.DruidStorageHandler
totalSize 0
#### A masked pattern was here ####
-Warning: Shuffle Join JOIN[6][tables = [cmv_mat_view2, $hdt$_0]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: EXPLAIN
SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
@@ -517,52 +527,65 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: cmv_basetable
- Statistics: Num rows: 6 Data size: 20240 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: ((3 = a) and (d = 3)) (type: boolean)
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: c (type: double)
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 1 Data size: 3373 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: double)
- TableScan
- alias: cmv_mat_view2
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.cmv_mat_view2","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["c"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- sort order:
- Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- value expressions: c (type: double)
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- keys:
- 0
- 1
- outputColumnNames: _col1, _col5
- Statistics: Num rows: 3 Data size: 10122 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: 3 (type: int), _col1 (type: double), 3 (type: int), _col5 (type: double)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 3 Data size: 10122 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 3 Data size: 10122 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (XPROD_EDGE), Map 3 (XPROD_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (a = 3) (type: boolean)
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 72 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: cmv_basetable
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((3 = a) and (d = 3)) (type: boolean)
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: c (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 6 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 36 Data size: 1044 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -570,7 +593,7 @@ STAGE PLANS:
Processor Tree:
ListSink
-Warning: Shuffle Join JOIN[6][tables = [cmv_mat_view2, $hdt$_0]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
PREHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -578,8 +601,7 @@ PREHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
PREHOOK: type: QUERY
PREHOOK: Input: default@cmv_basetable
-PREHOOK: Input: default@cmv_mat_view2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT * FROM (
(SELECT a, c FROM cmv_basetable WHERE a = 3) table1
JOIN
@@ -587,11 +609,11 @@ POSTHOOK: query: SELECT * FROM (
ON table1.a = table2.a)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cmv_basetable
-POSTHOOK: Input: default@cmv_mat_view2
-#### A masked pattern was here ####
-3 15.800000190734863 3 978.76
-3 25.600000381469727 3 978.76
-3 978.760009765625 3 978.76
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+3 15.8 3 978.76
+3 15.8 3 978.76
+3 9.8 3 978.76
+3 978.76 3 978.76
PREHOOK: query: DROP MATERIALIZED VIEW cmv_mat_view
PREHOOK: type: DROP_MATERIALIZED_VIEW
PREHOOK: Input: default@cmv_mat_view
diff --git ql/src/test/results/clientpositive/druid/druidmini_tableau_explain.q.out ql/src/test/results/clientpositive/druid/druidmini_tableau_explain.q.out
new file mode 100644
index 0000000..73fc40e
--- /dev/null
+++ ql/src/test/results/clientpositive/druid/druidmini_tableau_explain.q.out
@@ -0,0 +1,6629 @@
+PREHOOK: query: create database druid_tableau
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:druid_tableau
+POSTHOOK: query: create database druid_tableau
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:druid_tableau
+PREHOOK: query: use druid_tableau
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:druid_tableau
+POSTHOOK: query: use druid_tableau
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:druid_tableau
+PREHOOK: query: drop table if exists calcs
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists calcs
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@calcs_orc
+PREHOOK: Output: database:druid_tableau
+PREHOOK: Output: druid_tableau@calcs
+POSTHOOK: query: create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@calcs_orc
+POSTHOOK: Output: database:druid_tableau
+POSTHOOK: Output: druid_tableau@calcs
+POSTHOOK: Lineage: calcs.__time EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:datetime0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.bool0 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool0, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool1 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool2 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool3 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool3, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.date0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date2, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date3, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.datetime1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:datetime1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.int0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int0, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int2, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int3, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.key SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.num0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num0, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num1, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num2, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num3, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num4 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num4, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.str0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str2, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str3, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.time0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:time0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.time1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:time1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.zzz SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:zzz, type:string, comment:null), ]
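The CTAS above casts the four boolean columns to strings before handing them to the Druid storage handler, so they come back as string dimensions; a query against the Druid-backed table would compare against the string forms (a sketch, not part of the golden output):

-- bool0..bool3 were cast to string in the CTAS, so filter on the string values
SELECT key, bool0
FROM druid_tableau.calcs
WHERE bool0 = 'true';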
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2,$f3,$f4
+ druid.fieldTypes int,double,bigint,double,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num0"},{"type":"filtered","filter":{"type":"not","field":{"type":"selector","dimension":"num0","value":null}},"aggregator":{"type":"count","name":"$f2","fieldName":"num0"}},{"type":"doubleSum","name":"$f3","expression":"(\"num0\" * \"num0\")"},{"type":"count","name":"$f4"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f4","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: double), $f2 (type: bigint), $f3 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
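The three aggregates are the partial sums Tableau needs to finish a population standard deviation on the client: SUM(num0), the non-null COUNT(num0), and SUM(num0 * num0). A sketch of how the partials combine over the same table; the assembled query itself is not part of this test:

-- stdevp(num0) = sqrt(E[x^2] - E[x]^2), built from the three partials above
SELECT SQRT(SUM(num0 * num0) / COUNT(num0)
          - POW(SUM(num0) / COUNT(num0), 2)) AS stdevp_num0
FROM druid_tableau.calcs;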
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["date0","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), year(date0) (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0
+ druid.fieldTypes string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int0 (type: int), CASE WHEN ((int0 < 0)) THEN (null) ELSE (power(int0, 0.5)) END (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
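Note how the plan has already simplified the CASE: FLOOR(0.5) is 0, so the FLOOR(0.5) <> 0.5 conjunct is constant true and only int0 < 0 survives into the Select Operator. The folded form is equivalent to (a sketch):

-- FLOOR(0.5) = 0, so "FLOOR(0.5) <> 0.5" folds to true and drops out
SELECT key,
       CASE WHEN int0 < 0 THEN CAST(NULL AS DOUBLE) ELSE POW(int0, 0.5) END AS sum_z_power_int_num_ok
FROM druid_tableau.calcs;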
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), UDFToLong((((datediff(vc, '2004-07-04') * 24) + COALESCE(hour(vc),0)) - 0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
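The constant side of the hour arithmetic folds at planning time, which is why the expression in the Select Operator ends in "- 0": COALESCE(HOUR('2004-07-04'), 0) evaluates to 0 before execution. Only the per-row part survives, equivalent to (a sketch):

-- the literal-date term has already folded to 0; only per-row work remains
SELECT `__time`, key,
       CAST(datediff(`__time`, '2004-07-04') * 24 + COALESCE(HOUR(`__time`), 0) AS BIGINT) AS sum_z_datediff_hour_ok
FROM druid_tableau.calcs;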
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc
+ druid.fieldTypes string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"0","outputType":"LONG"}],"columns":["key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
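Both YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), ...)) calls fold to the same planning-time constant, so their difference is 0 and the plan ships it to Druid as the constant virtual column vc ("expression":"0") rather than evaluating the clock per row. The result is equivalent to (a sketch):

-- the year-minus-year expression has folded to a constant 0
SELECT key, CAST(0 AS BIGINT) AS sum_z_now_ok
FROM druid_tableau.calcs;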
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2,vc,key,vc0
+ druid.fieldTypes string,int,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"strlen(\"str2\")","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"\"str2\"","outputType":"STRING"}],"columns":["str2","vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: str2 (type: string), vc (type: int), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"num0\"","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), to_date(vc) (type: date)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(((4 * timestamp_extract(\"__time\",'YEAR','US/Pacific')) + CAST(((CAST((timestamp_extract(\"__time\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG')) - 8019)","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
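The one opaque constant in the pushed-down virtual column is 8019: it is the quarter index of the literal '2004-07-04', folded at planning time. The arithmetic, which can be checked directly in Hive:

-- quarter index = 4 * YEAR + ((MONTH - 1) / 3 + 1)
--               = 4 * 2004 + ((7 - 1) / 3 + 1) = 8016 + 3 = 8019
SELECT 4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT);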
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), key (type: string), null (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date0
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), COALESCE(date0,'2010-04-12') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), from_unixtime((to_unix_timestamp(date_add(to_date(concat(to_date(vc), ' 00:00:00')), (- ((1 + if(false, (((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7) - 7), ((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7))) - 1))),'yyyy-MM-dd') + (to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') - to_unix_timestamp(to_date(concat(to_date(vc), ' 00:00:00')),'yyyy-MM-dd'))), 'yyyy-MM-dd HH:mm:ss'), date_add(concat(to_date(vc), ' 00:00:00'), (- ((1 + if(false, (((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7) - 7), ((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7))) - 1)))) (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"}],"limitSpec":{"type":"default"},"filter":{"type":"or","fields":[{"type":"selector","dimension":"str2","value":"sixteen"},{"type":"selector","dimension":"str2","value":null}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num3"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: str2 (type: string), $f1 (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(\"str2\", 0, 3)","outputType":"STRING"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), from_unixtime((to_unix_timestamp(date_add(to_date(concat(to_date(vc), ' 00:00:00')), UDFToInteger(UDFToLong((- (day(vc) - 1))))),'yyyy-MM-dd') + (to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') - to_unix_timestamp(to_date(concat(to_date(vc), ' 00:00:00')),'yyyy-MM-dd'))), 'yyyy-MM-dd HH:mm:ss'), date_add(concat(to_date(vc), ' 00:00:00'), UDFToInteger(UDFToLong((- (day(vc) - 1)))))) (type: string), month(vc) (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), key (type: string), CASE WHEN (num0 is null) THEN (null) WHEN (num0 is not null) THEN (0.0) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int2,int3
+ druid.fieldTypes string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int2","int3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int2 (type: int), int3 (type: int), CASE WHEN ((int2 = 0)) THEN (null) ELSE ((UDFToDouble(int3) / UDFToDouble(int2))) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date0,date1
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date0","date1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), key (type: string), CASE WHEN ((date0 is null or date1 is null)) THEN (null) WHEN ((date0 > date1)) THEN (date0) ELSE (date1) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int3
+ druid.fieldTypes string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int3 (type: int)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc
+ druid.fieldTypes string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"columns":["key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), concat(to_date(vc), ' 00:00:00') (type: string), day(vc) (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,date3
+ druid.fieldTypes int,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(((timestamp_extract(\"date3\",'YEAR','US/Pacific') * 10000) + (timestamp_extract(\"date3\",'MONTH','US/Pacific') * 100)) + timestamp_extract(\"date3\",'DAY','US/Pacific'))","outputType":"LONG"}],"columns":["vc","key","date3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: int), key (type: string), date3 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"str0","outputName":"str0","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), str0 (type: string), null (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,date1,key,num0,num1,vc
+ druid.fieldTypes string,string,string,double,double,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"date0","outputName":"date0","outputType":"STRING"},{"type":"default","dimension":"date1","outputName":"date1","outputType":"STRING"},{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"},{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"case_searched((\"num0\" > \"num1\"),\"date0\",\"date1\")","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), key (type: string), num0 (type: double), num1 (type: double), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date0,date1,date2,num0,num1
+ druid.fieldTypes string,string,string,string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date0","date1","date2","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), date2 (type: string), key (type: string), num0 (type: double), num1 (type: double), if(CASE WHEN ((num0 > num1)) THEN (true) WHEN ((num0 <= num1)) THEN (false) ELSE (null) END, date0, if(CASE WHEN ((num0 > num1)) THEN (false) WHEN ((num0 <= num1)) THEN (true) ELSE (null) END, date1, date2)) (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: double), _col5 (type: double), _col6 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: double), _col5 (type: double), _col6 (type: string)
+ sort order: +++++++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: double), _col5 (type: double), _col6 (type: string)
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string), KEY._col4 (type: double), KEY._col5 (type: double), KEY._col6 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 8 Data size: 6016 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 6016 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: key, num0, vc
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc), var_samp(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), var_samp(VALUE._col3)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), CASE WHEN ((_col2 > 1)) THEN (_col4) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"concat(CAST(timestamp_extract(\"date0\",'YEAR','US/Pacific'), 'STRING'),'-01-01 00:00:00')","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str1,vc
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"lower(\"str1\")","outputType":"STRING"}],"columns":["key","str1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str1 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date3,key,vc
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"date3\"","outputType":"STRING"}],"columns":["date3","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date3 (type: string), key (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"}],"limitSpec":{"type":"default"},"filter":{"type":"and","fields":[{"type":"bound","dimension":"str2","lower":"eight","lowerStrict":false,"ordering":"lexicographic"},{"type":"bound","dimension":"str2","upper":"two","upperStrict":false,"ordering":"lexicographic"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num3"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: str2 (type: string), $f1 (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,$f2
+ druid.fieldTypes string,string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"},{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"CAST(CAST(\"num4\", 'LONG'), 'STRING')","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f2","fieldName":"num4"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: string), key (type: string), $f2 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["date0","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), year(date0) (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes int,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(((timestamp_extract(\"date3\",'YEAR','US/Pacific') * 10000) + (timestamp_extract(\"date3\",'MONTH','US/Pacific') * 100)) + timestamp_extract(\"date3\",'DAY','US/Pacific'))","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"concat(\"date3\",' 00:00:00')","outputType":"STRING"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: int), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), key (type: string), CASE WHEN (num0 is null) THEN (null) WHEN (num0 is not null) THEN (0.0) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: int)
+ outputColumnNames: key, str2, vc
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: max(str2), max(vc)
+ keys: key (type: string), str2 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: string), _col3 (type: int)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0), max(VALUE._col1)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 2976 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 2976 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc,num0,num1
+ druid.fieldTypes string,boolean,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" < \"num1\")","outputType":"LONG"}],"columns":["key","vc","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean), num0 (type: double), num1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,num0,vc0
+ druid.fieldTypes int,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["vc","num0","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), num0 (type: double), vc0 (type: double)
+ outputColumnNames: vc, num0, vc0
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc0), stddev_samp(num0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct), _col5 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), stddev_samp(VALUE._col3), count(VALUE._col4)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col5 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), CASE WHEN ((_col2 > 1)) THEN (_col4) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,bigint,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"filtered","filter":{"type":"not","field":{"type":"selector","dimension":"str2","value":null}},"aggregator":{"type":"count","name":"$f1","fieldName":"str2"}},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: bigint), $f1 (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT * FROM druid_tableau.calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT * FROM druid_tableau.calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames __time,key,str0,str1,str2,str3,date0,date1,date2,date3,time0,time1,datetime1,zzz,bool0,bool1,bool2,bool3,int0,int1,int2,int3,num0,num1,num2,num3,num4
+ druid.fieldTypes timestamp with local time zone,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,int,int,int,int,double,double,double,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["__time","key","str0","str1","str2","str3","date0","date1","date2","date3","time0","time1","datetime1","zzz","bool0","bool1","bool2","bool3","int0","int1","int2","int3","num0","num1","num2","num3","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: __time (type: timestamp with local time zone), key (type: string), str0 (type: string), str1 (type: string), str2 (type: string), str3 (type: string), date0 (type: string), date1 (type: string), date2 (type: string), date3 (type: string), time0 (type: string), time1 (type: string), datetime1 (type: string), zzz (type: string), bool0 (type: string), bool1 (type: string), bool2 (type: string), bool3 (type: string), int0 (type: int), int1 (type: int), int2 (type: int), int3 (type: int), num0 (type: double), num1 (type: double), num2 (type: double), num3 (type: double), num4 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,str0,str2,str3,vc
+ druid.fieldTypes string,double,double,string,string,string,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"},{"type":"default","dimension":"str0","outputName":"str0","outputType":"STRING"},{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"},{"type":"default","dimension":"str3","outputName":"str3","outputType":"STRING"},{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"case_searched((\"num0\" > \"num1\"),\"str2\",(\"num0\" <= \"num1\"),\"str3\",\"str0\")","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), str0 (type: string), str2 (type: string), str3 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" <= '1975-11-12')","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,num2
+ druid.fieldTypes string,double,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0","num1","num2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3536 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), num2 (type: double), if(CASE WHEN ((num0 > num1)) THEN (true) WHEN ((num0 <= num1)) THEN (false) ELSE (null) END, num0, if(CASE WHEN ((num0 > num1)) THEN (false) WHEN ((num0 <= num1)) THEN (true) ELSE (null) END, num1, num2)) (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3536 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(_col4)
+ keys: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: double)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3536 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: double)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: double)
+ Statistics: Num rows: 17 Data size: 3536 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col4 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: double), KEY._col2 (type: double), KEY._col3 (type: double)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1664 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1664 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,bigint,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"filtered","filter":{"type":"not","field":{"type":"selector","dimension":"num0","value":null}},"aggregator":{"type":"count","name":"$f1","fieldName":"num0"}},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: bigint), $f1 (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,vc
+ druid.fieldTypes string,double,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" - \"num1\")","outputType":"DOUBLE"}],"columns":["key","num0","num1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), acos((num0 / 20.0)) (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"num0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2,str3,num3
+ druid.fieldTypes string,string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2","str3","num3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str0 (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((num3 > 0.0)) THEN ((str2 <= lower(str0))) WHEN ((num3 <= 0.0)) THEN ((str2 <= str3)) ELSE ((str2 <= null)) END (type: boolean), num3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), from_unixtime((if((to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(vc,'yyyy-MM-dd')) + UDFToLong(int1)), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(((12 * timestamp_extract(\"__time\",'YEAR','US/Pacific')) + timestamp_extract(\"__time\",'MONTH','US/Pacific')) - 24055)","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num2
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"not","field":{"type":"selector","dimension":"key","value":null}},"columns":["key","num2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num2 (type: double)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double)
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ filterExpr: key is not null (type: boolean)
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default","limit":10,"columns":[{"dimension":"$f1","direction":"ascending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(_col1)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double)
+ Reducer 3
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 9 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 9 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), CASE WHEN ((num0 > 0.0)) THEN (ln(num0)) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key
+ druid.fieldTypes string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"or","fields":[{"type":"selector","dimension":"key","value":"key00"},{"type":"selector","dimension":"key","value":"key01"}]},"columns":["key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: date_add(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END, 15) (type: date), CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END (type: string), if((to_unix_timestamp(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END,'yyyy-MM-dd HH:mm:ss') > 0), from_unixtime((to_unix_timestamp(date_add(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END), 15),'yyyy-MM-dd') + (to_unix_timestamp(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END,'yyyy-MM-dd HH:mm:ss') - to_unix_timestamp(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END),'yyyy-MM-dd'))), 'yyyy-MM-dd HH:mm:ss'), date_add(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END, 15)) (type: string), CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END (type: string), if((to_unix_timestamp(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END,'yyyy-MM-dd HH:mm:ss') > 0), from_unixtime((to_unix_timestamp(date_add(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END), 15),'yyyy-MM-dd') + (to_unix_timestamp(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END,'yyyy-MM-dd HH:mm:ss') - to_unix_timestamp(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END),'yyyy-MM-dd'))), 'yyyy-MM-dd HH:mm:ss'), date_add(CASE WHEN ((key = 'key00')) THEN ('1997-04-01') WHEN ((key <> 'key00')) THEN ('1997-10-15') ELSE (null) END, 15)) (type: string), from_unixtime((to_unix_timestamp(date_add(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END), 15),'yyyy-MM-dd') + (to_unix_timestamp(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END,'yyyy-MM-dd HH:mm:ss') - to_unix_timestamp(to_date(CASE WHEN ((key = 'key00')) THEN ('1997-04-01 00:00:42') WHEN ((key <> 'key00')) THEN ('1997-10-15 00:00:42') ELSE (null) END),'yyyy-MM-dd'))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: date), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: date), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ sort order: ++++++
+ Map-reduce partition columns: _col0 (type: date), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: date), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string), KEY._col4 (type: string), KEY._col5 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc,num0,num1
+ druid.fieldTypes string,boolean,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" > \"num1\")","outputType":"LONG"}],"columns":["key","vc","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean), num0 (type: double), num1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" > '1975-11-12')","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,num0
+ druid.fieldTypes int,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"columns":["vc","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), num0 (type: double)
+ outputColumnNames: vc, num0
+ Statistics: Num rows: 17 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), avg(num0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 204 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: struct), _col4 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), avg(VALUE._col2), count(VALUE._col3)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col4 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3,vc
+ druid.fieldTypes string,string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"concat(\"str2\",\"str3\")","outputType":"STRING"}],"columns":["key","str2","str3","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0,vc1
+ druid.fieldTypes timestamp with local time zone,string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"concat(timestamp_extract(\"__time\",'YEAR','US/Pacific'),'-01-01 00:00:00')","outputType":"STRING"},{"type":"expression","name":"vc1","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0","vc1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: string), vc1 (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date3
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: CASE WHEN (date3 is null) THEN (0) WHEN (date3 is not null) THEN (1) ELSE (null) END (type: int), date3 (type: string), key (type: string), CASE WHEN (date3 is null) THEN (0) WHEN (date3 is not null) THEN (1) ELSE (null) END (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"exp((0.10000000000000001 * \"num0\"))","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,num2,$f4
+ druid.fieldTypes string,double,double,double,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"},{"type":"default","dimension":"num2","outputName":"num2","outputType":"DOUBLE"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f4","expression":"case_searched((\"num0\" > \"num1\"),\"num0\",(\"num0\" <= \"num1\"),\"num1\",\"num2\")"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), num2 (type: double), $f4 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(timestamp_extract(\"__time\",'YEAR','US/Pacific') - 2004)","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2,str3,num3
+ druid.fieldTypes string,string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2","str3","num3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str0 (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((num3 > 0.0)) THEN ((str2 >= lower(str0))) WHEN ((num3 <= 0.0)) THEN ((str2 >= str3)) ELSE ((str2 >= null)) END (type: boolean), num3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), (1 + if(false, (((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7) - 7), ((if(false, ((datediff(to_date(vc), '1995-01-01') pmod 7) - 7), (datediff(to_date(vc), '1995-01-01') pmod 7)) + 7) pmod 7))) (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc
+ druid.fieldTypes string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"0","outputType":"LONG"}],"columns":["key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,int
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"longMin","name":"$f1","fieldName":"int0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: int)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0,vc1
+ druid.fieldTypes timestamp with local time zone,string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'DAY','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"timestamp_extract(\"__time\",'DAY','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0","vc1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int), vc1 (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,num0,vc0
+ druid.fieldTypes int,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["vc","num0","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), num0 (type: double), vc0 (type: double)
+ outputColumnNames: vc, num0, vc0
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc0), stddev_pop(num0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct), _col5 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), stddev_pop(VALUE._col3), count(VALUE._col4)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col5 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc,vc0,vc1,num0
+ druid.fieldTypes string,boolean,boolean,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" == abs(\"num0\"))","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num0\" == abs(\"num0\"))","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"abs(\"num0\")","outputType":"DOUBLE"}],"columns":["key","vc","vc0","vc1","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean), vc0 (type: boolean), vc1 (type: double), num0 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date3
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: CASE WHEN (date3 is null) THEN (0) WHEN (date3 is not null) THEN (1) ELSE (null) END (type: int), date3 (type: string), key (type: string), CASE WHEN (date3 is null) THEN (0) WHEN (date3 is not null) THEN (1) ELSE (null) END (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str1,str2,str3
+ druid.fieldTypes string,string,string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str1","str2","str3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 15640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), str0 (type: string), str1 (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((str0 > str1)) THEN (str2) WHEN ((str0 <= str1)) THEN (str3) ELSE (null) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 15640 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 15640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ sort order: ++++++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+ Statistics: Num rows: 17 Data size: 15640 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string), KEY._col4 (type: string), KEY._col5 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 7360 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 7360 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2","str3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), str2 regexp concat('.*', str3, '.*') (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double)
+ outputColumnNames: key, num0
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: bigint), _col1 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), datediff(vc, '2004-07-04') (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num4
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num4 (type: double), round(num4, 1) (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date2,date3,key,vc
+ druid.fieldTypes string,string,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(((4 * timestamp_extract(\"date3\",'YEAR','US/Pacific')) + CAST(((CAST((timestamp_extract(\"date3\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG')) - ((4 * timestamp_extract(\"date2\",'YEAR','US/Pacific')) + CAST(((CAST((timestamp_extract(\"date2\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG')))","outputType":"LONG"}],"columns":["date2","date3","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date2 (type: string), date3 (type: string), key (type: string), vc (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num4,vc
+ druid.fieldTypes string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"num4\"","outputType":"DOUBLE"}],"columns":["key","num4","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num4 (type: string), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), lower(str0) (type: string), str2 (type: string), CASE WHEN ((lower(str0) is null or str2 is null)) THEN (null) WHEN ((lower(str0) < str2)) THEN (lower(str0)) ELSE (str2) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0
+ druid.fieldTypes string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int0 (type: int), CASE WHEN ((int0 < 0)) THEN (null) ELSE (power(int0, 0.5)) END (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), round(num0) (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,date0,date1
+ druid.fieldTypes timestamp with local time zone,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","date0","date1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 6936 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: floor((UDFToDouble(datediff(date0, vc)) / 2.0)) (type: bigint), floor((UDFToDouble(datediff(vc, date0)) / 3.0)) (type: bigint), floor((UDFToDouble(datediff(date0, date1)) / 4.0)) (type: bigint), floor((UDFToDouble(UDFToLong((year(date0) - year(vc)))) / 2.0)) (type: bigint), floor((UDFToDouble(UDFToLong((year(vc) - year(date0)))) / 3.0)) (type: bigint), floor((UDFToDouble(UDFToLong((year(date0) - year(date1)))) / 4.0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 6936 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 6936 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint)
+ sort order: ++++++
+ Map-reduce partition columns: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint)
+ Statistics: Num rows: 17 Data size: 6936 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: bigint), KEY._col1 (type: bigint), KEY._col2 (type: bigint), KEY._col3 (type: bigint), KEY._col4 (type: bigint), KEY._col5 (type: bigint)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), datediff(vc, '2004-07-04') (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2","str3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), CASE WHEN (character_length(str3) is null) THEN ((null = str3)) WHEN ((character_length(str3) < 1)) THEN (('' = str3)) ELSE ((substring(str2, 1, character_length(str3)) = str3)) END (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,str2,vc0
+ druid.fieldTypes int,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["vc","str2","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), str2 (type: string), vc0 (type: int)
+ outputColumnNames: vc, str2, vc0
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: max(str2), max(vc0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: max(VALUE._col0), max(VALUE._col1), count(VALUE._col2)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string), _col2 (type: int), _col3 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col3 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string), _col2 (type: int)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"CAST(CAST(((CAST((timestamp_extract(\"__time\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG'), 'STRING')","outputType":"STRING"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"date0\",'YEAR','US/Pacific')","outputType":"LONG"}],"columns":["vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: int)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["str2","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: CASE WHEN (((str2 = 'eight') or (str2 = 'eleven'))) THEN ('eight') WHEN (((str2 = 'fifteen') or (str2 = 'five') or (str2 = 'fourteen'))) THEN ('fifteen') WHEN ((str2 = 'nine')) THEN ('nine') WHEN ((str2 = 'one')) THEN ('one') WHEN (((str2 = 'six') or (str2 = 'sixteen'))) THEN ('six') WHEN (((str2 = 'ten') or (str2 = 'three') or (str2 = 'twelve') or (str2 = 'two'))) THEN ('ten') ELSE (null) END (type: string), num0 (type: double)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: avg(_col1), count(_col1), sum(_col1)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: struct), _col2 (type: bigint), _col3 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: avg(VALUE._col0), count(VALUE._col1), sum(VALUE._col2)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,bigint,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"longSum","name":"$f1","expression":"21474836470"},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: bigint)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,vc0,num1
+ druid.fieldTypes boolean,boolean,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"},{"type":"default","dimension":"vc0","outputName":"vc0","outputType":"STRING"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num1\" > 10)","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num2\" > 10)","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: boolean), vc0 (type: boolean), num1 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3,num3
+ druid.fieldTypes string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2","str3","num3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((num3 > 0.0)) THEN ((str2 = str2)) WHEN ((num3 <= 0.0)) THEN ((str2 = str3)) ELSE ((str2 = null)) END (type: boolean), num3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,date1,date2,key,num0,num1,vc
+ druid.fieldTypes string,string,string,string,double,double,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"date0","outputName":"date0","outputType":"STRING"},{"type":"default","dimension":"date1","outputName":"date1","outputType":"STRING"},{"type":"default","dimension":"date2","outputName":"date2","outputType":"STRING"},{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"},{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"case_searched((\"num0\" > \"num1\"),\"date0\",(\"num0\" <= \"num1\"),\"date1\",\"date2\")","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), date2 (type: string), key (type: string), num0 (type: double), num1 (type: double), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), CASE WHEN ((num0 > 0.0)) THEN ((log10(num0) / 0.3010299956639812)) ELSE (null) END (type: double), CASE WHEN ((num0 > 0.0)) THEN (log10(num0)) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"abs(\"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), (num0 * num0) (type: double), key (type: string), null (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2,str3,num0,num1
+ druid.fieldTypes string,string,string,string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2","str3","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), str0 (type: string), str2 (type: string), str3 (type: string), if(CASE WHEN ((num0 > num1)) THEN (true) WHEN ((num0 <= num1)) THEN (false) ELSE (null) END, str2, if(CASE WHEN ((num0 > num1)) THEN (false) WHEN ((num0 <= num1)) THEN (true) ELSE (null) END, str3, str0)) (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string)
+ sort order: +++++++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string)
+ Statistics: Num rows: 17 Data size: 12784 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string), KEY._col1 (type: double), KEY._col2 (type: double), KEY._col3 (type: string), KEY._col4 (type: string), KEY._col5 (type: string), KEY._col6 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Statistics: Num rows: 8 Data size: 6016 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 6016 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), UDFToString((datediff(vc, concat(year(vc), '-01-01 00:00:00')) + 1)) (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,bigint,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"filtered","filter":{"type":"not","field":{"type":"selector","dimension":"date3","value":null}},"aggregator":{"type":"count","name":"$f1","fieldName":"date3"}},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: bigint), $f1 (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc,num4
+ druid.fieldTypes string,boolean,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num4\" == null)","outputType":"LONG"}],"columns":["key","vc","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean), num4 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0,num4,vc
+ druid.fieldTypes string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"CAST(concat(CAST(\"num4\", 'STRING'),CAST(\"int0\", 'STRING')), 'DOUBLE')","outputType":"DOUBLE"}],"columns":["key","int0","num4","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int0 (type: string), num4 (type: string), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), from_unixtime((if((to_unix_timestamp(from_unixtime((if((to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd')) + UDFToLong((COALESCE(minute(vc),0) * 60))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd')) + UDFToLong((COALESCE(minute(vc),0) * 60))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd')) + UDFToLong((COALESCE(minute(vc),0) * 60))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd')) + UDFToLong(COALESCE(second(vc),0))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), floor((UDFToDouble((((datediff(vc, '1995-01-01') - ((datediff(vc, '1995-01-01') pmod 7) + 1)) - 3472) + 1)) / 7.0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames
+ druid.fieldTypes
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"date0","value":null,"extractionFn":{"type":"timeFormat","format":"yyyy","timeZone":"US/Pacific","locale":"en-US"}},"columns":[],"resultFormat":"compactedList","limit":1}
+ druid.query.type scan
+ Select Operator
+ expressions: null (type: int)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), COALESCE(str2,'i'm null') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), CASE WHEN ((vc is not null and int1 is not null)) THEN (from_unixtime(to_unix_timestamp(concat((UDFToLong(year(vc)) + floor((UDFToDouble((month(vc) + int1)) / 12.0))), concat('-', concat(lpad(((month(vc) + int1) pmod 12), 2, '0'), substr(vc, 8)))),substr('yyyy-MM-dd HH:mm:ss', 0, character_length(UDFToString(vc)))), 'yyyy-MM-dd HH:mm:ss')) ELSE (null) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"CAST(((CAST((timestamp_extract(\"__time\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG')","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), lower(str0) (type: string), str2 (type: string), CASE WHEN ((lower(str0) is null or str2 is null)) THEN (null) WHEN ((lower(str0) > str2)) THEN (lower(str0)) ELSE (str2) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2
+ druid.fieldTypes string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"}],"limitSpec":{"type":"default"},"filter":{"type":"and","fields":[{"type":"or","fields":[{"type":"bound","dimension":"str2","upper":"eight","upperStrict":true,"ordering":"lexicographic"},{"type":"bound","dimension":"str2","lower":"six","lowerStrict":true,"ordering":"lexicographic"}]},{"type":"not","field":{"type":"selector","dimension":"str2","value":null}}]},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: str2 (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames
+ druid.fieldTypes
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"__time","value":null,"extractionFn":{"type":"timeFormat","format":"M","timeZone":"US/Pacific","locale":"en-US"}},"columns":[],"resultFormat":"compactedList","limit":1}
+ druid.query.type scan
+ Select Operator
+ expressions: null (type: int)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str1
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str1 (type: string), str1 regexp '.*IN.*' (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num4
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), COALESCE(num4,-1.0) (type: double), num4 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str1,str2,str3,vc
+ druid.fieldTypes string,string,string,string,string,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"str0","outputName":"str0","outputType":"STRING"},{"type":"default","dimension":"str1","outputName":"str1","outputType":"STRING"},{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"},{"type":"default","dimension":"str3","outputName":"str3","outputType":"STRING"},{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"case_searched((\"str0\" > \"str1\"),\"str2\",\"str3\")","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), str0 (type: string), str1 (type: string), str2 (type: string), str3 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"DOUBLE"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"(CAST(32000, 'DOUBLE') + \"num4\")","outputType":"DOUBLE"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: double)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,num0,vc0
+ druid.fieldTypes int,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["vc","num0","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), num0 (type: double), vc0 (type: double)
+ outputColumnNames: vc, num0, vc0
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc0), var_pop(num0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct), _col5 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), var_pop(VALUE._col3), count(VALUE._col4)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col5 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str1
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str1 (type: string), CASE WHEN (CASE WHEN (((character_length(str1) - 2) < 0)) THEN (false) ELSE (((character_length(str1) - 2) + 1) is null) END) THEN (null) WHEN (CASE WHEN (((character_length(str1) - 2) < 0)) THEN (false) ELSE ((((character_length(str1) - 2) + 1) < 1)) END) THEN ((substring(rtrim(str1), 1, 2) = 'ES')) ELSE ((substring(rtrim(str1), CASE WHEN (((character_length(str1) - 2) < 0)) THEN (1) ELSE (((character_length(str1) - 2) + 1)) END, 2) = 'ES')) END (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,double,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMin","name":"$f1","fieldName":"num0"},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2","str3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((CASE WHEN (((character_length(str2) - character_length(str3)) < 0)) THEN (false) ELSE (((character_length(str2) - character_length(str3)) + 1) is null) END or character_length(str3) is null)) THEN ((null = str3)) WHEN ((character_length(str3) < 1)) THEN (('' = str3)) WHEN (CASE WHEN (((character_length(str2) - character_length(str3)) < 0)) THEN (false) ELSE ((((character_length(str2) - character_length(str3)) + 1) < 1)) END) THEN ((substring(rtrim(str2), 1, character_length(str3)) = str3)) ELSE ((substring(rtrim(str2), CASE WHEN (((character_length(str2) - character_length(str3)) < 0)) THEN (1) ELSE (((character_length(str2) - character_length(str3)) + 1)) END, character_length(str3)) = str3)) END (type: boolean)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"upper(\"str2\")","outputType":"STRING"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num4
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), COALESCE(num4,0) (type: double), num4 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2
+ druid.fieldTypes string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: str2 (type: string), CASE WHEN (str2 is null) THEN (null) WHEN (((str2 = 'eight') or (str2 = 'eleven'))) THEN ('eight') WHEN (((str2 = 'sixteen') or (str2 = 'ten'))) THEN ('sixteen') WHEN (((str2 = 'three') or (str2 = 'twelve'))) THEN ('three') ELSE ('two') END (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(1)
+ keys: _col0 (type: string), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int2,int3
+ druid.fieldTypes string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int2","int3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int2 (type: int), int3 (type: int), CASE WHEN ((int2 = 0)) THEN (null) ELSE ((UDFToDouble(int3) / UDFToDouble(int2))) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,int0,vc0
+ druid.fieldTypes int,int,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"\"int0\"","outputType":"DOUBLE"}],"columns":["vc","int0","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 272 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), int0 (type: int), vc0 (type: double)
+ outputColumnNames: vc, int0, vc0
+ Statistics: Num rows: 17 Data size: 272 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(int0), sum(int0), avg(vc0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 272 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 272 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: struct), _col4 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0), sum(VALUE._col1), avg(VALUE._col2), count(VALUE._col3)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: double), _col4 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col4 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames num0,key,vc
+ druid.fieldTypes double,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"num0\"","outputType":"DOUBLE"}],"columns":["num0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), key (type: string), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'SECOND','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), (datediff(vc, concat(year(vc), '-01-01 00:00:00')) + 1) (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" == '1972-07-04')","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), datediff(vc, '2004-07-04') (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), UDFToLong((((((((((datediff(vc, '2004-07-04') * 24) + COALESCE(hour(vc),0)) - 0) * 60) + COALESCE(minute(vc),0)) - 0) * 60) + COALESCE(second(vc),0)) - 0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"concat(concat(concat('Q',CAST(CAST(((CAST((timestamp_extract(\"date0\",'MONTH','US/Pacific') - 1), 'DOUBLE') / CAST(3, 'DOUBLE')) + CAST(1, 'DOUBLE')), 'LONG'), 'STRING')),'-'),CAST(timestamp_extract(\"date0\",'YEAR','US/Pacific'), 'STRING'))","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str1,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(substring(\"str1\", 0, 2) == 'BI')","outputType":"LONG"}],"columns":["key","str1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str1 (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), from_unixtime((if((to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(vc,'yyyy-MM-dd')) + UDFToLong((int1 * 60))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: CASE WHEN (str2 is null) THEN (0) WHEN (str2 is not null) THEN (1) ELSE (null) END (type: int), key (type: string), str2 (type: string), CASE WHEN (str2 is null) THEN (0) WHEN (str2 is not null) THEN (1) ELSE (null) END (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date0,date1
+ druid.fieldTypes string,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date0","date1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), key (type: string), CASE WHEN ((date0 is null or date1 is null)) THEN (null) WHEN ((date0 < date1)) THEN (date0) ELSE (date1) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" >= '1975-11-12')","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"str2\" == null)","outputType":"LONG"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING')","outputType":"STRING"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date1
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 6256 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: date1 (type: string), key (type: string), if((((1 + if(false, (((if(false, ((datediff(to_date(date1), '1995-01-01') pmod 7) - 7), (datediff(to_date(date1), '1995-01-01') pmod 7)) + 7) pmod 7) - 7), ((if(false, ((datediff(to_date(date1), '1995-01-01') pmod 7) - 7), (datediff(to_date(date1), '1995-01-01') pmod 7)) + 7) pmod 7))) = 7) or ((1 + if(false, (((if(false, ((datediff(to_date(date1), '1995-01-01') pmod 7) - 7), (datediff(to_date(date1), '1995-01-01') pmod 7)) + 7) pmod 7) - 7), ((if(false, ((datediff(to_date(date1), '1995-01-01') pmod 7) - 7), (datediff(to_date(date1), '1995-01-01') pmod 7)) + 7) pmod 7))) = 1)), null, date1) (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 17 Data size: 6256 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 17 Data size: 6256 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ Statistics: Num rows: 17 Data size: 6256 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), CASE WHEN (num0 is null) THEN (0) WHEN (num0 is not null) THEN (1) ELSE (null) END (type: int), num0 (type: double), key (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0,vc
+ druid.fieldTypes string,int,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"int0\"","outputType":"DOUBLE"}],"columns":["key","int0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3332 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), int0 (type: int), vc (type: double)
+ outputColumnNames: key, int0, vc
+ Statistics: Num rows: 17 Data size: 3332 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(int0), sum(int0), avg(vc)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3332 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3332 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0), sum(VALUE._col1), avg(VALUE._col2)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1568 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1568 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), to_date(vc) (type: date)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), CASE WHEN ((num0 < 0.0)) THEN (null) ELSE (sqrt(num0)) END (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,double,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num0"},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num4
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num4 (type: double), CASE WHEN ((num0 is null or num4 is null)) THEN (null) WHEN ((num0 > num4)) THEN (num0) ELSE (num4) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), concat((year(vc) + int1), substr(from_unixtime(to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"filtered","filter":{"type":"not","field":{"type":"selector","dimension":"num0","value":null}},"aggregator":{"type":"count","name":"$f1","fieldName":"num0"}}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: bigint), $f1 (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(- \"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,num0,vc0
+ druid.fieldTypes int,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["vc","num0","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), num0 (type: double), vc0 (type: double)
+ outputColumnNames: vc, num0, vc0
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc0), var_samp(num0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 340 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct), _col5 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), var_samp(VALUE._col3), count(VALUE._col4)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col5 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), CASE WHEN ((_col2 > 1)) THEN (_col4) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num4
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num4 (type: double), CASE WHEN ((num0 is null or num4 is null)) THEN (null) WHEN ((num0 < num4)) THEN (num0) ELSE (num4) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"CAST(timestamp_extract(\"__time\",'DAY','US/Pacific'), 'STRING')","outputType":"STRING"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,time1,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"time1\",'SECOND','US/Pacific')","outputType":"LONG"}],"columns":["key","time1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), time1 (type: string), vc (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" == null)","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int1
+ druid.fieldTypes string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int1 (type: int)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"longSum","name":"$f1","fieldName":"int0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: bigint)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc
+ druid.fieldTypes string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"0","outputType":"LONG"}],"columns":["key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2
+ druid.fieldTypes string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"}],"limitSpec":{"type":"default","columns":[{"dimension":"str2","direction":"ascending","dimensionOrder":"lexicographic"}]},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: str2 (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0,vc1
+ druid.fieldTypes timestamp with local time zone,string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'MONTH','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"timestamp_extract(\"__time\",'MONTH','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0","vc1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int), vc1 (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num2
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"not","field":{"type":"selector","dimension":"key","value":null}},"columns":["key","num2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num2 (type: double)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double)
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ filterExpr: key is not null (type: boolean)
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default","limit":10,"columns":[{"dimension":"$f1","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"num2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 _col0 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(_col1)
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 18 Data size: 3590 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double)
+ Reducer 3
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 9 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 9 Data size: 1795 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,vc
+ druid.fieldTypes string,double,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" + \"num1\")","outputType":"DOUBLE"}],"columns":["key","num0","num1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), from_unixtime((if((to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(from_unixtime((if((to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(concat(to_date(vc), ' 00:00:00'),'yyyy-MM-dd')) + UDFToLong((COALESCE(hour(vc),0) * 3600))), 'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd')) + UDFToLong((COALESCE(minute(vc),0) * 60))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0,num4,vc
+ druid.fieldTypes string,string,string,bigint
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"CAST(CAST(concat(CAST(\"num4\", 'STRING'),CAST(\"int0\", 'STRING')), 'DOUBLE'), 'LONG')","outputType":"LONG"}],"columns":["key","int0","num4","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int0 (type: string), num4 (type: string), vc (type: bigint)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: key, num0, vc
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc), stddev_samp(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), stddev_samp(VALUE._col3)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), CASE WHEN ((_col2 > 1)) THEN (_col4) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"cos(\"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames num0,key,vc
+ druid.fieldTypes double,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"num0\"","outputType":"DOUBLE"}],"columns":["num0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: num0 (type: double), key (type: string), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: key, num0, vc
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc), stddev_pop(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), stddev_pop(VALUE._col3)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'HOUR','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2,key,vc,vc0
+ druid.fieldTypes string,string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"str2\"","outputType":"STRING"},{"type":"expression","name":"vc0","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["str2","key","vc","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: str2 (type: string), key (type: string), vc (type: string), vc0 (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str2
+ druid.fieldTypes string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: CASE WHEN (str2 is null) THEN (null) WHEN (((str2 = 'one') or (str2 = 'three') or (str2 = 'two'))) THEN ('one') WHEN ((str2 = 'eight')) THEN (null) ELSE (str2) END (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3128 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" * \"num0\")","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: key, num0, vc
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), sum(vc), var_pop(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), var_pop(VALUE._col3)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,str3,num3
+ druid.fieldTypes string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2","str3","num3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((num3 > 0.0)) THEN ((str2 <> str2)) WHEN ((num3 <= 0.0)) THEN ((str2 <> str3)) ELSE ((str2 <> null)) END (type: boolean), num3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames
+ druid.fieldTypes
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"date1","value":null,"extractionFn":{"type":"timeFormat","format":"d","timeZone":"US/Pacific","locale":"en-US"}},"columns":[],"resultFormat":"compactedList","limit":1}
+ druid.query.type scan
+ Select Operator
+ expressions: null (type: int)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), COALESCE(UDFToString(hour(vc)),'') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), COALESCE(UDFToString(second(vc)),'') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), from_unixtime((if((to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss') > 0), to_unix_timestamp(vc,'yyyy-MM-dd HH:mm:ss'), to_unix_timestamp(vc,'yyyy-MM-dd')) + UDFToLong((int1 * 3600))), 'yyyy-MM-dd HH:mm:ss') (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames __time,key,str0,str1,str2,str3,date0,date1,date2,date3,time0,time1,datetime1,zzz,bool0,bool1,bool2,bool3,int0,int1,int2,int3,num0,num1,num2,num3,num4
+ druid.fieldTypes timestamp with local time zone,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,string,int,int,int,int,double,double,double,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["__time","key","str0","str1","str2","str3","date0","date1","date2","date3","time0","time1","datetime1","zzz","bool0","bool1","bool2","bool3","int0","int1","int2","int3","num0","num1","num2","num3","num4"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double)
+ outputColumnNames: key, num0
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(num0), count(num0), avg(num0)
+ keys: key (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: double), _col2 (type: bigint), _col3 (type: struct)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0), count(VALUE._col1), avg(VALUE._col2)
+ keys: KEY._col0 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,vc0,num2
+ druid.fieldTypes boolean,boolean,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"},{"type":"default","dimension":"vc0","outputName":"vc0","outputType":"STRING"},{"type":"default","dimension":"num2","outputName":"num2","outputType":"DOUBLE"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num1\" > 10)","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"(\"num2\" > 10)","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: vc (type: boolean), vc0 (type: boolean), num2 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["key","str2","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), str2 (type: string), vc (type: int)
+ outputColumnNames: key, str2, vc
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: min(str2), min(vc)
+ keys: key (type: string), str2 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 17 Data size: 6324 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: string), _col3 (type: int)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: min(VALUE._col0), min(VALUE._col1)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 2976 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 2976 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3808 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: to_date(CASE WHEN ((day(vc) < 10)) THEN (null) ELSE (vc) END) (type: date), key (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3808 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count(_col1)
+ keys: _col0 (type: date)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 17 Data size: 3808 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: date)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: date)
+ Statistics: Num rows: 17 Data size: 3808 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: date)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1792 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1792 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str2"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), concat(concat(' ', str2), ' ') (type: string), concat(concat('|', rtrim(concat(concat(' ', str2), ' '))), '|') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), UDFToLong(((((((datediff(vc, '2004-07-04') * 24) + COALESCE(hour(vc),0)) - 0) * 60) + COALESCE(minute(vc),0)) - 0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,time1,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"time1\",'HOUR','US/Pacific')","outputType":"LONG"}],"columns":["key","time1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), time1 (type: string), vc (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames
+ druid.fieldTypes
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"selector","dimension":"key","value":null},"columns":[],"resultFormat":"compactedList","limit":1}
+ druid.query.type scan
+ Select Operator
+ expressions: null (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,vc
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"pow(\"num0\",2)","outputType":"DOUBLE"}],"columns":["key","num0","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), num0 (type: double), vc (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,$f1,$f2
+ druid.fieldTypes int,double,bigint
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"LONG"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"num0"},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"having":{"type":"filter","filter":{"type":"bound","dimension":"$f2","lower":"0","lowerStrict":true,"ordering":"numeric"}}}
+ druid.query.type groupBy
+ Select Operator
+ expressions: $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,date0,date1,num0,num1
+ druid.fieldTypes string,string,string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","date0","date1","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 9656 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: date0 (type: string), date1 (type: string), key (type: string), num0 (type: double), num1 (type: double), CASE WHEN ((num0 > num1)) THEN (date0) WHEN ((num0 <= num1)) THEN (date1) ELSE (null) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 9656 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: double), _col5 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 17 Data size: 9656 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: double), _col5 (type: string)
+ sort order: ++++++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: double), _col4 (type: double), _col5 (type: string)
+ Statistics: Num rows: 17 Data size: 9656 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: double), KEY._col4 (type: double), KEY._col5 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 8 Data size: 4544 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 4544 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"concat(timestamp_extract(\"__time\",'YEAR','US/Pacific'),case_searched((timestamp_extract(\"__time\",'MONTH','US/Pacific') < 4),'-01',(timestamp_extract(\"__time\",'MONTH','US/Pacific') < 7),'-04',(timestamp_extract(\"__time\",'MONTH','US/Pacific') < 10),'-07','-10'),'-01 00:00:00')","outputType":"STRING"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'MINUTE','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames date0,key,vc
+ druid.fieldTypes string,string,boolean
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"date0\" < '1975-11-12')","outputType":"LONG"}],"columns":["date0","key","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: date0 (type: string), key (type: string), vc (type: boolean)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames str0
+ druid.fieldTypes string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"str0","outputName":"str0","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: str0 (type: string), 'CONST' (type: string)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,int0,int1
+ druid.fieldTypes string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","int0","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), int0 (type: int), int1 (type: int), CASE WHEN ((int1 = 0)) THEN (null) ELSE ((UDFToDouble(int0) / UDFToDouble(int1))) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1,$f3
+ druid.fieldTypes string,double,double,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"num0","outputName":"num0","outputType":"DOUBLE"},{"type":"default","dimension":"num1","outputName":"num1","outputType":"DOUBLE"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f3","expression":"case_searched((\"num0\" > \"num1\"),\"num0\",\"num1\")"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), $f3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,time1,vc
+ druid.fieldTypes string,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"time1\",'MINUTE','US/Pacific')","outputType":"LONG"}],"columns":["key","time1","vc"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), time1 (type: string), vc (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,vc0,vc1
+ druid.fieldTypes timestamp with local time zone,string,int,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"}],"columns":["vc","key","vc0","vc1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), vc0 (type: int), vc1 (type: int)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str0,str2,str3,num3
+ druid.fieldTypes string,string,string,string,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","str0","str2","str3","num3"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), str0 (type: string), str2 (type: string), str3 (type: string), CASE WHEN ((num3 > 0.0)) THEN ((str2 > str0)) WHEN ((num3 <= 0.0)) THEN ((str2 > str3)) ELSE ((str2 > null)) END (type: boolean), num3 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), COALESCE(UDFToString(minute(vc)),'') (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,vc,num0,num1
+ druid.fieldTypes string,boolean,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"(\"num0\" >= \"num1\")","outputType":"LONG"}],"columns":["key","vc","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: key (type: string), vc (type: boolean), num0 (type: double), num1 (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,str2
+ druid.fieldTypes string,string
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"},{"type":"default","dimension":"str2","outputName":"str2","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), str2 (type: string), str2 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,$f1
+ druid.fieldTypes string,double
+ druid.query.json {"queryType":"groupBy","dataSource":"druid_tableau.calcs","granularity":"all","dimensions":[{"type":"default","dimension":"key","outputName":"key","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMin","name":"$f1","fieldName":"num0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+ druid.query.type groupBy
+ Select Operator
+ expressions: key (type: string), $f1 (type: double), $f1 (type: double)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames key,num0,num1
+ druid.fieldTypes string,double,double
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"columns":["key","num0","num1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), num0 (type: double), num1 (type: double), CASE WHEN ((num0 > num1)) THEN (num0) WHEN ((num0 <= num1)) THEN (num1) ELSE (null) END (type: double)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: sum(_col3)
+ keys: _col0 (type: string), _col1 (type: double), _col2 (type: double)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: double)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: double)
+ Statistics: Num rows: 17 Data size: 3400 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col3 (type: double)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: sum(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: double), KEY._col2 (type: double)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1600 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key,int1
+ druid.fieldTypes timestamp with local time zone,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key","int1"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), int1 (type: int), key (type: string), CASE WHEN ((vc is not null and int1 is not null)) THEN (from_unixtime(to_unix_timestamp(concat((UDFToLong(year(vc)) + floor((UDFToDouble((month(vc) + (int1 * 3))) / 12.0))), concat('-', concat(lpad(((month(vc) + (int1 * 3)) pmod 12), 2, '0'), substr(vc, 8)))),substr('yyyy-MM-dd HH:mm:ss', 0, character_length(UDFToString(vc)))), 'yyyy-MM-dd HH:mm:ss')) ELSE (null) END (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ sort order: ++++
+ Map-reduce partition columns: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 17 Data size: 3876 Basic stats: COMPLETE Column stats: NONE
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ keys: KEY._col0 (type: timestamp with local time zone), KEY._col1 (type: int), KEY._col2 (type: string), KEY._col3 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1824 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,str2,vc0
+ druid.fieldTypes int,string,int
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"1","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"strlen(\"str2\")","outputType":"LONG"}],"columns":["vc","str2","vc0"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: vc (type: int), str2 (type: string), vc0 (type: int)
+ outputColumnNames: vc, str2, vc0
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: min(str2), min(vc0), count()
+ keys: vc (type: int)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 17 Data size: 3264 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: bigint)
+ Reducer 2
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: min(VALUE._col0), min(VALUE._col1), count(VALUE._col2)
+ keys: KEY._col0 (type: int)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string), _col2 (type: int), _col3 (type: bigint)
+ outputColumnNames: _col1, _col2, _col3
+ Statistics: Num rows: 8 Data size: 1536 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col3 > 0) (type: boolean)
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string), _col2 (type: int)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 2 Data size: 384 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: calcs
+ properties:
+ druid.fieldNames vc,key
+ druid.fieldTypes timestamp with local time zone,string
+ druid.query.json {"queryType":"scan","dataSource":"druid_tableau.calcs","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","key"],"resultFormat":"compactedList"}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: timestamp with local time zone), key (type: string), UDFToLong((((datediff(vc, '2004-07-04') * 24) + COALESCE(hour(vc),0)) - 0)) (type: bigint)
+ outputColumnNames: _col0, _col1, _col2
+ ListSink
+
diff --git ql/src/test/results/clientpositive/druid/druidmini_tableau_query.q.out ql/src/test/results/clientpositive/druid/druidmini_tableau_query.q.out
new file mode 100644
index 0000000..51f1ae4
--- /dev/null
+++ ql/src/test/results/clientpositive/druid/druidmini_tableau_query.q.out
@@ -0,0 +1,4701 @@
+PREHOOK: query: create database druid_tableau
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:druid_tableau
+POSTHOOK: query: create database druid_tableau
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:druid_tableau
+PREHOOK: query: use druid_tableau
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:druid_tableau
+POSTHOOK: query: use druid_tableau
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:druid_tableau
+PREHOOK: query: drop table if exists calcs
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists calcs
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@calcs_orc
+PREHOOK: Output: database:druid_tableau
+PREHOOK: Output: druid_tableau@calcs
+POSTHOOK: query: create table calcs
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES (
+ "druid.segment.granularity" = "MONTH",
+ "druid.query.granularity" = "DAY")
+AS SELECT
+ cast(datetime0 as timestamp with local time zone) `__time`,
+ key,
+ str0, str1, str2, str3,
+ date0, date1, date2, date3,
+ time0, time1,
+ datetime1,
+ zzz,
+ cast(bool0 as string) bool0,
+ cast(bool1 as string) bool1,
+ cast(bool2 as string) bool2,
+ cast(bool3 as string) bool3,
+ int0, int1, int2, int3,
+ num0, num1, num2, num3, num4
+from default.calcs_orc
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@calcs_orc
+POSTHOOK: Output: database:druid_tableau
+POSTHOOK: Output: druid_tableau@calcs
+POSTHOOK: Lineage: calcs.__time EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:datetime0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.bool0 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool0, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool1 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool2 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.bool3 EXPRESSION [(calcs_orc)calcs_orc.FieldSchema(name:bool3, type:boolean, comment:null), ]
+POSTHOOK: Lineage: calcs.date0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date2, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.date3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:date3, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.datetime1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:datetime1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.int0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int0, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int2, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.int3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:int3, type:int, comment:null), ]
+POSTHOOK: Lineage: calcs.key SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.num0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num0, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num1, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num2, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num3, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.num4 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:num4, type:double, comment:null), ]
+POSTHOOK: Lineage: calcs.str0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str2 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str2, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.str3 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:str3, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.time0 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:time0, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.time1 SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:time1, type:string, comment:null), ]
+POSTHOOK: Lineage: calcs.zzz SIMPLE [(calcs_orc)calcs_orc.FieldSchema(name:zzz, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_ FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 920.059997406006
+PREHOOK: query: SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 2004
+10.0 1 100.0 NULL
+12.300000190734863 1 151.29000469207767 2004
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+-3.5 1 12.25 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+-12.300000190734863 1 151.29000469207767 1972
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+3.5 1 12.25 2004
+0.0 1 0.0 NULL
+15.699999809265137 1 246.48999401092533 1975
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0 0.0
+key08 0 0.0
+key00 1 1.0
+key07 0 0.0
+key10 4 2.0
+key15 4 2.0
+key12 0 0.0
+key13 4 2.0
+key05 3 1.7320508075688772
+key09 8 2.8284271247461903
+key11 10 3.1622776601683795
+key01 0 0.0
+key06 8 2.8284271247461903
+key16 8 2.8284271247461903
+key04 7 2.6457513110645907
+key14 11 3.3166247903554
+key02 0 0.0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 17
+2004-07-04 17:00:00.0 US/Pacific key08 17
+2004-07-08 17:00:00.0 US/Pacific key00 113
+2004-07-12 17:00:00.0 US/Pacific key07 209
+2004-07-13 17:00:00.0 US/Pacific key10 233
+2004-07-13 17:00:00.0 US/Pacific key15 233
+2004-07-16 17:00:00.0 US/Pacific key12 305
+2004-07-19 17:00:00.0 US/Pacific key13 377
+2004-07-21 17:00:00.0 US/Pacific key05 425
+2004-07-23 17:00:00.0 US/Pacific key09 473
+2004-07-24 17:00:00.0 US/Pacific key11 497
+2004-07-25 17:00:00.0 US/Pacific key01 521
+2004-07-27 17:00:00.0 US/Pacific key06 569
+2004-07-27 17:00:00.0 US/Pacific key16 569
+2004-07-28 17:00:00.0 US/Pacific key04 593
+2004-07-30 17:00:00.0 US/Pacific key14 641
+2004-08-01 17:00:00.0 US/Pacific key02 689
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) - YEAR(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss')) AS BIGINT) AS sum_z_now_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0
+key08 0
+key00 0
+key07 0
+key10 0
+key15 0
+key12 0
+key13 0
+key05 0
+key09 0
+key11 0
+key01 0
+key06 0
+key16 0
+key04 0
+key14 0
+key02 0
+PREHOOK: query: SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS temp_z_min_str2___3992540197__0_, LENGTH(Calcs.str2) AS min_len_str2__ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 0 key03 NULL
+nine 4 key08 nine
+one 3 key00 one
+eight 5 key07 eight
+eleven 6 key10 eleven
+sixteen 7 key15 sixteen
+NULL 0 key12 NULL
+fourteen 8 key13 fourteen
+six 3 key05 six
+ten 3 key09 ten
+twelve 6 key11 twelve
+two 3 key01 two
+NULL 0 key06 NULL
+NULL 0 key16 NULL
+five 4 key04 five
+fifteen 7 key14 fifteen
+three 5 key02 three
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num0 AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 -15.699999809265137
+key08 10.0 10.0
+key00 12.300000190734863 12.300000190734863
+key07 0.0 0.0
+key10 0.0 0.0
+key15 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key05 -3.5 -3.5
+key09 0.0 0.0
+key11 0.0 0.0
+key01 -12.300000190734863 -12.300000190734863
+key06 0.0 0.0
+key16 0.0 0.0
+key04 3.5 3.5
+key14 0.0 0.0
+key02 15.699999809265137 15.699999809265137
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.`__time`) + CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR('2004-07-04') + CAST((MONTH('2004-07-04') - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_z_var_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_var_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_var_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 key03 NULL
+10.0 1 100.0 key08 NULL
+12.300000190734863 1 151.29000469207767 key00 NULL
+0.0 1 0.0 key07 NULL
+0.0 1 0.0 key10 NULL
+0.0 1 0.0 key15 NULL
+0.0 1 0.0 key12 NULL
+0.0 1 0.0 key13 NULL
+-3.5 1 12.25 key05 NULL
+0.0 1 0.0 key09 NULL
+0.0 1 0.0 key11 NULL
+-12.300000190734863 1 151.29000469207767 key01 NULL
+0.0 1 0.0 key06 NULL
+0.0 1 0.0 key16 NULL
+3.5 1 12.25 key04 NULL
+0.0 1 0.0 key14 NULL
+15.699999809265137 1 246.48999401092533 key02 NULL
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, COALESCE(Calcs.date0, '2010-04-12') AS none_z_ifnull_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 2004-06-04
+NULL key08 2010-04-12
+2004-04-15 key00 2004-04-15
+NULL key07 2010-04-12
+NULL key10 2010-04-12
+NULL key15 2010-04-12
+NULL key12 2010-04-12
+NULL key13 2010-04-12
+NULL key05 2010-04-12
+NULL key09 2010-04-12
+NULL key11 2010-04-12
+1972-07-04 key01 1972-07-04
+NULL key06 2010-04-12
+NULL key16 2010-04-12
+2004-06-19 key04 2004-06-19
+NULL key14 2010-04-12
+1975-11-12 key02 1975-11-12
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(-((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) - 1) AS INT))) AS none_z_datetrunc_week_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04 00:00:00
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04 00:00:00
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-04 00:00:00
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-11 00:00:00
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-11 00:00:00
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-11 00:00:00
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-11 00:00:00
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-18 00:00:00
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-18 00:00:00
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-18 00:00:00
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-18 00:00:00
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25 00:00:00
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-25 00:00:00
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-25 00:00:00
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-25 00:00:00
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-25 00:00:00
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 00:00:00
+PREHOOK: query: SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 = 'sixteen') OR (Calcs.str2 IS NULL)) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL -5.539999485015869
+sixteen -10.979999542236328
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, CASE WHEN 3 >= 0 THEN SUBSTRING(Calcs.str2,1,CAST(3 AS INT)) ELSE NULL END AS none_z_left_str_num_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL NULL
+key08 nine nin
+key00 one one
+key07 eight eig
+key10 eleven ele
+key15 sixteen six
+key12 NULL NULL
+key13 fourteen fou
+key05 six six
+key09 ten ten
+key11 twelve twe
+key01 two two
+key06 NULL NULL
+key16 NULL NULL
+key04 five fiv
+key14 fifteen fif
+key02 three thr
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00')), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), CAST(CAST(-(DAY(Calcs.`__time`) - 1) AS BIGINT) AS INT))) AS none_z_datetrunc_month_ok, MONTH(Calcs.`__time`) AS none_z_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-01 00:00:00 7
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-01 00:00:00 7
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-01 00:00:00 7
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-01 00:00:00 7
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-01 00:00:00 7
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-01 00:00:00 7
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-01 00:00:00 7
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-01 00:00:00 7
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-01 00:00:00 7
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-01 00:00:00 7
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-01 00:00:00 7
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-01 00:00:00 7
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-01 00:00:00 7
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-01 00:00:00 7
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-01 00:00:00 7
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-01 00:00:00 7
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 00:00:00 8
+PREHOOK: query: SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_z_stdevp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdevp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdevp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS stp_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 key03 0.0
+10.0 1 100.0 key08 0.0
+12.300000190734863 1 151.29000469207767 key00 0.0
+0.0 1 0.0 key07 0.0
+0.0 1 0.0 key10 0.0
+0.0 1 0.0 key15 0.0
+0.0 1 0.0 key12 0.0
+0.0 1 0.0 key13 0.0
+-3.5 1 12.25 key05 0.0
+0.0 1 0.0 key09 0.0
+0.0 1 0.0 key11 0.0
+-12.300000190734863 1 151.29000469207767 key01 0.0
+0.0 1 0.0 key06 0.0
+0.0 1 0.0 key16 0.0
+3.5 1 12.25 key04 0.0
+0.0 1 0.0 key14 0.0
+15.699999809265137 1 246.48999401092533 key02 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, (CASE WHEN Calcs.int2 = 0 THEN NULL ELSE CAST(Calcs.int3 AS DOUBLE) / Calcs.int2 END) AS sum_z_int_div_zero_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -5 5 -1.0
+key08 -6 17 -2.8333333333333335
+key00 5 8 1.6
+key07 0 3 NULL
+key10 -3 11 -3.6666666666666665
+key15 -9 11 -1.2222222222222223
+key12 0 11 NULL
+key13 4 18 4.5
+key05 2 7 3.5
+key09 -9 2 -0.2222222222222222
+key11 -4 2 -0.5
+key01 -4 13 -3.25
+key06 9 18 2.0
+key16 6 0 0.0
+key04 3 9 3.0
+key14 -8 18 -2.25
+key02 5 2 0.4
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 > Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_max_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 2004-04-04 key03 2004-06-04
+NULL 2004-04-09 key08 NULL
+2004-04-15 2004-04-01 key00 2004-04-15
+NULL 2004-04-08 key07 NULL
+NULL 2004-04-11 key10 NULL
+NULL 2004-04-16 key15 NULL
+NULL 2004-04-13 key12 NULL
+NULL 2004-04-14 key13 NULL
+NULL 2004-04-06 key05 NULL
+NULL 2004-04-10 key09 NULL
+NULL 2004-04-12 key11 NULL
+1972-07-04 2004-04-02 key01 2004-04-02
+NULL 2004-04-07 key06 NULL
+NULL 2004-04-17 key16 NULL
+2004-06-19 2004-04-05 key04 2004-06-19
+NULL 2004-04-15 key14 NULL
+1975-11-12 2004-04-03 key02 2004-04-03
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int3 AS sum_int3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 5
+key08 17
+key00 8
+key07 3
+key10 11
+key15 11
+key12 11
+key13 18
+key05 7
+key09 2
+key11 2
+key01 13
+key06 18
+key16 0
+key04 9
+key14 18
+key02 2
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, true AS none_z_true_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false
+key08 false
+key00 false
+key07 false
+key10 false
+key15 false
+key12 false
+key13 false
+key05 false
+key09 false
+key11 false
+key01 false
+key06 false
+key16 false
+key04 false
+key14 false
+key02 false
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00') AS none_z_datetrunc_day_ok, DAY(Calcs.`__time`) AS none_z_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04 00:00:00 4
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04 00:00:00 4
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08 00:00:00 8
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12 00:00:00 12
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13 00:00:00 13
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13 00:00:00 13
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16 00:00:00 16
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19 00:00:00 19
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21 00:00:00 21
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23 00:00:00 23
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24 00:00:00 24
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25 00:00:00 25
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27 00:00:00 27
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27 00:00:00 27
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28 00:00:00 28
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30 00:00:00 30
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 00:00:00 1
+PREHOOK: query: SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_str_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+19691231 key03 NULL
+19691231 key08 1983-05-22
+19691231 key00 1986-03-20
+19691231 key07 NULL
+19691231 key10 1999-08-20
+19691231 key15 NULL
+19691231 key12 NULL
+19691231 key13 1996-05-13
+19691231 key05 1979-04-01
+19691231 key09 NULL
+19691231 key11 NULL
+19691231 key01 NULL
+19691231 key06 NULL
+19691231 key16 1992-01-18
+19691231 key04 1996-03-07
+19691231 key14 1986-11-08
+19691231 key02 1997-02-02
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END AS none_z_left_str_negative_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, CASE WHEN (-2) >= 0 THEN SUBSTRING(Calcs.str0,1,CAST((-2) AS INT)) ELSE NULL END
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 FURNITURE NULL
+key01 FURNITURE NULL
+key02 OFFICE SUPPLIES NULL
+key03 OFFICE SUPPLIES NULL
+key04 OFFICE SUPPLIES NULL
+key05 OFFICE SUPPLIES NULL
+key06 OFFICE SUPPLIES NULL
+key07 OFFICE SUPPLIES NULL
+key08 TECHNOLOGY NULL
+key09 TECHNOLOGY NULL
+key10 TECHNOLOGY NULL
+key11 TECHNOLOGY NULL
+key12 TECHNOLOGY NULL
+key13 TECHNOLOGY NULL
+key14 TECHNOLOGY NULL
+key15 TECHNOLOGY NULL
+key16 TECHNOLOGY NULL
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 ELSE Calcs.date1 END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 2004-04-06 key05 -3.5 9.380000114440918 2004-04-06
+NULL 2004-04-07 key06 0.0 16.420000076293945 2004-04-07
+NULL 2004-04-08 key07 0.0 11.380000114440918 2004-04-08
+NULL 2004-04-09 key08 10.0 9.470000267028809 NULL
+NULL 2004-04-10 key09 0.0 12.399999618530273 2004-04-10
+NULL 2004-04-11 key10 0.0 10.319999694824219 2004-04-11
+NULL 2004-04-12 key11 0.0 2.4700000286102295 2004-04-12
+NULL 2004-04-13 key12 0.0 12.050000190734863 2004-04-13
+NULL 2004-04-14 key13 0.0 10.369999885559082 2004-04-14
+NULL 2004-04-15 key14 0.0 7.099999904632568 2004-04-15
+NULL 2004-04-16 key15 0.0 16.809999465942383 2004-04-16
+NULL 2004-04-17 key16 0.0 7.119999885559082 2004-04-17
+1972-07-04 2004-04-02 key01 -12.300000190734863 6.710000038146973 2004-04-02
+1975-11-12 2004-04-03 key02 15.699999809265137 9.779999732971191 1975-11-12
+2004-04-15 2004-04-01 key00 12.300000190734863 8.420000076293945 2004-04-15
+2004-06-04 2004-04-04 key03 -15.699999809265137 7.429999828338623 2004-04-04
+2004-06-19 2004-04-05 key04 3.5 9.050000190734863 2004-04-05
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2))
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2)) AS none_z_case_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.date0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.date1,Calcs.date2))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 2004-04-06 1980-11-07 key05 -3.5 9.380000114440918 2004-04-06
+NULL 2004-04-07 1977-02-08 key06 0.0 16.420000076293945 2004-04-07
+NULL 2004-04-08 1974-05-03 key07 0.0 11.380000114440918 2004-04-08
+NULL 2004-04-09 1976-09-09 key08 10.0 9.470000267028809 NULL
+NULL 2004-04-10 1998-08-12 key09 0.0 12.399999618530273 2004-04-10
+NULL 2004-04-11 1974-03-17 key10 0.0 10.319999694824219 2004-04-11
+NULL 2004-04-12 1994-04-20 key11 0.0 2.4700000286102295 2004-04-12
+NULL 2004-04-13 2001-02-04 key12 0.0 12.050000190734863 2004-04-13
+NULL 2004-04-14 1988-01-05 key13 0.0 10.369999885559082 2004-04-14
+NULL 2004-04-15 1972-07-12 key14 0.0 7.099999904632568 2004-04-15
+NULL 2004-04-16 1995-06-04 key15 0.0 16.809999465942383 2004-04-16
+NULL 2004-04-17 2002-04-27 key16 0.0 7.119999885559082 2004-04-17
+1972-07-04 2004-04-02 1995-09-03 key01 -12.300000190734863 6.710000038146973 2004-04-02
+1975-11-12 2004-04-03 1997-09-19 key02 15.699999809265137 9.779999732971191 1975-11-12
+2004-04-15 2004-04-01 1977-04-20 key00 12.300000190734863 8.420000076293945 2004-04-15
+2004-06-04 2004-04-04 1980-07-26 key03 -15.699999809265137 7.429999828338623 2004-04-04
+2004-06-19 2004-04-05 1997-05-30 key04 3.5 9.050000190734863 2004-04-05
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 1 151.29000469207767 NULL
+key01 -12.300000190734863 1 151.29000469207767 NULL
+key02 15.699999809265137 1 246.48999401092533 NULL
+key03 -15.699999809265137 1 246.48999401092533 NULL
+key04 3.5 1 12.25 NULL
+key05 -3.5 1 12.25 NULL
+key06 0.0 1 0.0 NULL
+key07 0.0 1 0.0 NULL
+key08 10.0 1 100.0 NULL
+key09 0.0 1 0.0 NULL
+key10 0.0 1 0.0 NULL
+key11 0.0 1 0.0 NULL
+key12 0.0 1 0.0 NULL
+key13 0.0 1 0.0 NULL
+key14 0.0 1 0.0 NULL
+key15 0.0 1 0.0 NULL
+key16 0.0 1 0.0 NULL
+PREHOOK: query: SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00')
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00') AS tyr_date0_ok FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CAST(YEAR(Calcs.date0) AS STRING), '-01-01 00:00:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-01-01 00:00:00
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, LOWER(Calcs.str1) AS none_z_lower_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 BINDER ACCESSORIES binder accessories
+key08 ANSWERING MACHINES answering machines
+key00 CLAMP ON LAMPS clamp on lamps
+key07 BUSINESS ENVELOPES business envelopes
+key10 CD-R MEDIA cd-r media
+key15 DVD dvd
+key12 CORDED KEYBOARDS corded keyboards
+key13 CORDLESS KEYBOARDS cordless keyboards
+key05 BINDING MACHINES binding machines
+key09 BUSINESS COPIERS business copiers
+key11 CONFERENCE PHONES conference phones
+key01 CLOCKS clocks
+key06 BINDING SUPPLIES binding supplies
+key16 ERICSSON ericsson
+key04 BINDER CLIPS binder clips
+key14 DOT MATRIX PRINTERS dot matrix printers
+key02 AIR PURIFIERS air purifiers
+PREHOOK: query: SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, Calcs.date3 AS none_z_date_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL key03 NULL
+1983-05-22 key08 1983-05-22
+1986-03-20 key00 1986-03-20
+NULL key07 NULL
+1999-08-20 key10 1999-08-20
+NULL key15 NULL
+NULL key12 NULL
+1996-05-13 key13 1996-05-13
+1979-04-01 key05 1979-04-01
+NULL key09 NULL
+NULL key11 NULL
+NULL key01 NULL
+NULL key06 NULL
+1992-01-18 key16 1992-01-18
+1996-03-07 key04 1996-03-07
+1986-11-08 key14 1986-11-08
+1997-02-02 key02 1997-02-02
+PREHOOK: query: SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS none_str2_nk, SUM(Calcs.num3) AS sum_num3_ok FROM druid_tableau.calcs Calcs WHERE ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'two')) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+eight 3.640000104904175
+eleven -4.789999961853027
+fifteen 6.840000152587891
+five 12.930000305175781
+fourteen -18.43000030517578
+nine -13.380000114440918
+one -11.520000457763672
+six -19.959999084472656
+sixteen -10.979999542236328
+ten -10.5600004196167
+three -12.170000076293945
+twelve -10.8100004196167
+two -9.3100004196167
+PREHOOK: query: SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT CAST(CAST(Calcs.num4 AS BIGINT) AS STRING) AS none_b21622_nk, Calcs.key AS none_key_nk, SUM(Calcs.num4) AS sum_num4_ok FROM druid_tableau.calcs Calcs GROUP BY CAST(CAST(Calcs.num4 AS BIGINT) AS STRING), Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-10 key07 -10.239999771118164
+-13 key02 -13.470000267028809
+-14 key14 -14.210000038146973
+-6 key03 -6.050000190734863
+0 key00 0.0
+0 key06 0.0
+0 key09 0.0
+0 key13 0.0
+0 key16 0.0
+10 key01 10.850000381469727
+10 key05 10.710000038146973
+19 key10 19.389999389648438
+3 key11 3.819999933242798
+3 key12 3.380000114440918
+4 key08 4.769999980926514
+6 key15 6.75
+8 key04 8.319999694824219
+PREHOOK: query: SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_calculation1__1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_calculation1__2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_calculation1__4071133194__0_, YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 2004
+10.0 1 100.0 NULL
+12.300000190734863 1 151.29000469207767 2004
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+-3.5 1 12.25 NULL
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+-12.300000190734863 1 151.29000469207767 1972
+0.0 1 0.0 NULL
+0.0 1 0.0 NULL
+3.5 1 12.25 2004
+0.0 1 0.0 NULL
+15.699999809265137 1 246.48999401092533 1975
+PREHOOK: query: SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (((YEAR(Calcs.date3) * 10000) + (MONTH(Calcs.date3) * 100)) + DAY(Calcs.date3)) AS md_date3_ok, Calcs.key AS none_key_nk, CONCAT(Calcs.date3, ' 00:00:00') AS none_z_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+19691231 key03 00:00:00
+19691231 key08 1983-05-22 00:00:00
+19691231 key00 1986-03-20 00:00:00
+19691231 key07 00:00:00
+19691231 key10 1999-08-20 00:00:00
+19691231 key15 00:00:00
+19691231 key12 00:00:00
+19691231 key13 1996-05-13 00:00:00
+19691231 key05 1979-04-01 00:00:00
+19691231 key09 00:00:00
+19691231 key11 00:00:00
+19691231 key01 00:00:00
+19691231 key06 00:00:00
+19691231 key16 1992-01-18 00:00:00
+19691231 key04 1996-03-07 00:00:00
+19691231 key14 1986-11-08 00:00:00
+19691231 key02 1997-02-02 00:00:00
+PREHOOK: query: SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_z_varp_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_varp_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_varp_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.num0 IS NULL) THEN CAST(NULL AS DOUBLE) WHEN NOT (Calcs.num0 IS NULL) THEN CAST(0. AS DOUBLE) ELSE NULL END) AS vrp_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 key03 0.0
+10.0 1 100.0 key08 0.0
+12.300000190734863 1 151.29000469207767 key00 0.0
+0.0 1 0.0 key07 0.0
+0.0 1 0.0 key10 0.0
+0.0 1 0.0 key15 0.0
+0.0 1 0.0 key12 0.0
+0.0 1 0.0 key13 0.0
+-3.5 1 12.25 key05 0.0
+0.0 1 0.0 key09 0.0
+0.0 1 0.0 key11 0.0
+-12.300000190734863 1 151.29000469207767 key01 0.0
+0.0 1 0.0 key06 0.0
+0.0 1 0.0 key16 0.0
+3.5 1 12.25 key04 0.0
+0.0 1 0.0 key14 0.0
+15.699999809265137 1 246.48999401092533 key02 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 one one 3
+key01 two two 3
+key02 three three 5
+key03 NULL NULL 0
+key04 five five 4
+key05 six six 3
+key06 NULL NULL 0
+key07 eight eight 5
+key08 nine nine 4
+key09 ten ten 3
+key10 eleven eleven 6
+key11 twelve twelve 6
+key12 NULL NULL 0
+key13 fourteen fourteen 8
+key14 fifteen fifteen 7
+key15 sixteen sixteen 7
+key16 NULL NULL 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 < Calcs.num1) AS none_z_num_lt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false -15.699999809265137 7.429999828338623
+key08 false 10.0 9.470000267028809
+key00 false 12.300000190734863 8.420000076293945
+key07 false 0.0 11.380000114440918
+key10 false 0.0 10.319999694824219
+key15 false 0.0 16.809999465942383
+key12 false 0.0 12.050000190734863
+key13 false 0.0 10.369999885559082
+key05 false -3.5 9.380000114440918
+key09 false 0.0 12.399999618530273
+key11 false 0.0 2.4700000286102295
+key01 false -12.300000190734863 6.710000038146973
+key06 false 0.0 16.420000076293945
+key16 false 0.0 7.119999885559082
+key04 false 3.5 9.050000190734863
+key14 false 0.0 7.099999904632568
+key02 false 15.699999809265137 9.779999732971191
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 920.059997406006 7.5588426878095465
+PREHOOK: query: SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT COUNT(Calcs.str2) AS cnt_str2_ok, COUNT(Calcs.str2) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+13 13
+PREHOOK: query: SELECT * FROM druid_tableau.calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT * FROM druid_tableau.calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 OFFICE SUPPLIES BINDER ACCESSORIES NULL e 2004-06-04 2004-04-04 1980-07-26 NULL 1900-01-01 18:51:48 22:50:16 NULL d TRUE FALSE FALSE NULL 0 -4 -5 5 -15.699999809265137 7.429999828338623 8.510000228881836 -7.25 -6.050000190734863
+2004-07-04 17:00:00.0 US/Pacific key08 TECHNOLOGY ANSWERING MACHINES nine NULL NULL 2004-04-09 1976-09-09 1983-05-22 1900-01-01 09:00:59 22:20:14 NULL i NULL NULL FALSE FALSE 0 3 -6 17 10.0 9.470000267028809 0.0 -13.380000114440918 4.769999980926514
+2004-07-08 17:00:00.0 US/Pacific key00 FURNITURE CLAMP ON LAMPS one e 2004-04-15 2004-04-01 1977-04-20 1986-03-20 1899-12-30 21:07:32 19:36:22 NULL a TRUE TRUE FALSE TRUE 1 -3 5 8 12.300000190734863 8.420000076293945 17.860000610351562 -11.520000457763672 0.0
+2004-07-12 17:00:00.0 US/Pacific key07 OFFICE SUPPLIES BUSINESS ENVELOPES eight e NULL 2004-04-08 1974-05-03 NULL 1900-01-01 19:45:54 19:48:23 NULL h FALSE NULL TRUE FALSE 0 2 0 3 0.0 11.380000114440918 17.25 3.640000104904175 -10.239999771118164
+2004-07-13 17:00:00.0 US/Pacific key10 TECHNOLOGY CD-R MEDIA eleven e NULL 2004-04-11 1974-03-17 1999-08-20 1900-01-01 01:31:32 00:05:57 NULL k TRUE TRUE FALSE NULL 4 0 -3 11 0.0 10.319999694824219 6.800000190734863 -4.789999961853027 19.389999389648438
+2004-07-13 17:00:00.0 US/Pacific key15 TECHNOLOGY DVD sixteen e NULL 2004-04-16 1995-06-04 NULL 1899-12-30 22:24:08 NULL NULL p FALSE NULL FALSE TRUE 4 0 -9 11 0.0 16.809999465942383 10.979999542236328 -10.979999542236328 6.75
+2004-07-16 17:00:00.0 US/Pacific key12 TECHNOLOGY CORDED KEYBOARDS NULL NULL NULL 2004-04-13 2001-02-04 NULL 1900-01-01 13:53:46 04:48:07 NULL m NULL FALSE TRUE TRUE 0 0 0 11 0.0 12.050000190734863 0.0 -6.619999885559082 3.380000114440918
+2004-07-19 17:00:00.0 US/Pacific key13 TECHNOLOGY CORDLESS KEYBOARDS fourteen NULL NULL 2004-04-14 1988-01-05 1996-05-13 1900-01-01 04:57:51 NULL NULL n NULL FALSE TRUE TRUE 4 0 4 18 0.0 10.369999885559082 13.039999961853027 -18.43000030517578 0.0
+2004-07-21 17:00:00.0 US/Pacific key05 OFFICE SUPPLIES BINDING MACHINES six NULL NULL 2004-04-06 1980-11-07 1979-04-01 1900-01-01 08:59:39 19:57:33 NULL f NULL FALSE TRUE FALSE 3 0 2 7 -3.5 9.380000114440918 8.979999542236328 -19.959999084472656 10.710000038146973
+2004-07-23 17:00:00.0 US/Pacific key09 TECHNOLOGY BUSINESS COPIERS ten e NULL 2004-04-10 1998-08-12 NULL 1900-01-01 20:36:00 NULL NULL j NULL TRUE FALSE NULL 8 3 -9 2 0.0 12.399999618530273 11.5 -10.5600004196167 0.0
+2004-07-24 17:00:00.0 US/Pacific key11 TECHNOLOGY CONFERENCE PHONES twelve NULL NULL 2004-04-12 1994-04-20 NULL 1899-12-30 22:15:40 04:40:49 NULL l FALSE TRUE TRUE NULL 10 -8 -4 2 0.0 2.4700000286102295 3.7899999618530273 -10.8100004196167 3.819999933242798
+2004-07-25 17:00:00.0 US/Pacific key01 FURNITURE CLOCKS two e 1972-07-04 2004-04-02 1995-09-03 NULL 1900-01-01 13:48:48 02:05:25 NULL b FALSE TRUE FALSE NULL 0 -6 -4 13 -12.300000190734863 6.710000038146973 16.729999542236328 -9.3100004196167 10.850000381469727
+2004-07-27 17:00:00.0 US/Pacific key06 OFFICE SUPPLIES BINDING SUPPLIES NULL e NULL 2004-04-07 1977-02-08 NULL 1900-01-01 07:37:48 NULL NULL g TRUE NULL FALSE NULL 8 0 9 18 0.0 16.420000076293945 11.6899995803833 10.930000305175781 0.0
+2004-07-27 17:00:00.0 US/Pacific key16 TECHNOLOGY ERICSSON NULL NULL NULL 2004-04-17 2002-04-27 1992-01-18 1900-01-01 11:58:29 12:33:57 NULL q NULL NULL FALSE NULL 8 -9 6 0 0.0 7.119999885559082 7.869999885559082 -2.5999999046325684 0.0
+2004-07-28 17:00:00.0 US/Pacific key04 OFFICE SUPPLIES BINDER CLIPS five NULL 2004-06-19 2004-04-05 1997-05-30 1996-03-07 1900-01-01 15:01:19 NULL NULL e FALSE FALSE TRUE TRUE 7 0 3 9 3.5 9.050000190734863 6.460000038146973 12.930000305175781 8.319999694824219
+2004-07-30 17:00:00.0 US/Pacific key14 TECHNOLOGY DOT MATRIX PRINTERS fifteen e NULL 2004-04-15 1972-07-12 1986-11-08 1899-12-30 22:42:43 18:58:41 NULL o TRUE FALSE TRUE NULL 11 0 -8 18 0.0 7.099999904632568 0.0 6.840000152587891 -14.210000038146973
+2004-08-01 17:00:00.0 US/Pacific key02 OFFICE SUPPLIES AIR PURIFIERS three e 1975-11-12 2004-04-03 1997-09-19 1997-02-02 1900-01-01 18:21:08 09:33:31 NULL c NULL TRUE FALSE NULL 0 0 5 2 15.699999809265137 9.779999732971191 0.0 -12.170000076293945 -13.470000267028809
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END) AS none_z_if_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.str2 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.str3 ELSE Calcs.str0 END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 FURNITURE one e one
+key01 -12.300000190734863 6.710000038146973 FURNITURE two e e
+key02 15.699999809265137 9.779999732971191 OFFICE SUPPLIES three e three
+key03 -15.699999809265137 7.429999828338623 OFFICE SUPPLIES NULL e e
+key04 3.5 9.050000190734863 OFFICE SUPPLIES five NULL NULL
+key05 -3.5 9.380000114440918 OFFICE SUPPLIES six NULL NULL
+key06 0.0 16.420000076293945 OFFICE SUPPLIES NULL e e
+key07 0.0 11.380000114440918 OFFICE SUPPLIES eight e e
+key08 10.0 9.470000267028809 TECHNOLOGY nine NULL nine
+key09 0.0 12.399999618530273 TECHNOLOGY ten e e
+key10 0.0 10.319999694824219 TECHNOLOGY eleven e e
+key11 0.0 2.4700000286102295 TECHNOLOGY twelve NULL NULL
+key12 0.0 12.050000190734863 TECHNOLOGY NULL NULL NULL
+key13 0.0 10.369999885559082 TECHNOLOGY fourteen NULL NULL
+key14 0.0 7.099999904632568 TECHNOLOGY fifteen e e
+key15 0.0 16.809999465942383 TECHNOLOGY sixteen e e
+key16 0.0 7.119999885559082 TECHNOLOGY NULL NULL NULL
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 <= '1975-11-12') AS none_z_date_le_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM(IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.num0,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.num1,Calcs.num2))) AS sum_z_case_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 17.860000610351562 12.300000190734863
+key01 -12.300000190734863 6.710000038146973 16.729999542236328 6.710000038146973
+key02 15.699999809265137 9.779999732971191 0.0 15.699999809265137
+key03 -15.699999809265137 7.429999828338623 8.510000228881836 7.429999828338623
+key04 3.5 9.050000190734863 6.460000038146973 9.050000190734863
+key05 -3.5 9.380000114440918 8.979999542236328 9.380000114440918
+key06 0.0 16.420000076293945 11.6899995803833 16.420000076293945
+key07 0.0 11.380000114440918 17.25 11.380000114440918
+key08 10.0 9.470000267028809 0.0 10.0
+key09 0.0 12.399999618530273 11.5 12.399999618530273
+key10 0.0 10.319999694824219 6.800000190734863 10.319999694824219
+key11 0.0 2.4700000286102295 3.7899999618530273 2.4700000286102295
+key12 0.0 12.050000190734863 0.0 12.050000190734863
+key13 0.0 10.369999885559082 13.039999961853027 10.369999885559082
+key14 0.0 7.099999904632568 0.0 7.099999904632568
+key15 0.0 16.809999465942383 10.979999542236328 16.809999465942383
+key16 0.0 7.119999885559082 7.869999885559082 7.119999885559082
+PREHOOK: query: SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+17 17
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 - Calcs.num1) AS sum_z_num_minus_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 7.429999828338623 -23.12999963760376
+key08 10.0 9.470000267028809 0.5299997329711914
+key00 12.300000190734863 8.420000076293945 3.880000114440918
+key07 0.0 11.380000114440918 -11.380000114440918
+key10 0.0 10.319999694824219 -10.319999694824219
+key15 0.0 16.809999465942383 -16.809999465942383
+key12 0.0 12.050000190734863 -12.050000190734863
+key13 0.0 10.369999885559082 -10.369999885559082
+key05 -3.5 9.380000114440918 -12.880000114440918
+key09 0.0 12.399999618530273 -12.399999618530273
+key11 0.0 2.4700000286102295 -2.4700000286102295
+key01 -12.300000190734863 6.710000038146973 -19.010000228881836
+key06 0.0 16.420000076293945 -16.420000076293945
+key16 0.0 7.119999885559082 -7.119999885559082
+key04 3.5 9.050000190734863 -5.550000190734863
+key14 0.0 7.099999904632568 -7.099999904632568
+key02 15.699999809265137 9.779999732971191 5.920000076293945
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ACOS((CASE WHEN 20 = 0 THEN NULL ELSE Calcs.num0 / 20 END)) AS sum_z_acos_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 2.4734924384383525
+key08 10.0 1.0471975511965979
+key00 12.300000190734863 0.9084103481987317
+key07 0.0 1.5707963267948966
+key10 0.0 1.5707963267948966
+key15 0.0 1.5707963267948966
+key12 0.0 1.5707963267948966
+key13 0.0 1.5707963267948966
+key05 -3.5 1.746702094958613
+key09 0.0 1.5707963267948966
+key11 0.0 1.5707963267948966
+key01 -12.300000190734863 2.2331823053910616
+key06 0.0 1.5707963267948966
+key16 0.0 1.5707963267948966
+key04 3.5 1.3948905586311804
+key14 0.0 1.5707963267948966
+key02 15.699999809265137 0.6681002151514406
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 12.300000190734863
+key01 -12.300000190734863 -12.300000190734863
+key02 15.699999809265137 15.699999809265137
+key03 -15.699999809265137 -15.699999809265137
+key04 3.5 3.5
+key05 -3.5 -3.5
+key06 0.0 0.0
+key07 0.0 0.0
+key08 10.0 10.0
+key09 0.0 0.0
+key10 0.0 0.0
+key11 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key14 0.0 0.0
+key15 0.0 0.0
+key16 0.0 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_le_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 OFFICE SUPPLIES NULL e NULL -7.25
+key08 TECHNOLOGY nine NULL NULL -13.380000114440918
+key00 FURNITURE one e false -11.520000457763672
+key07 OFFICE SUPPLIES eight e true 3.640000104904175
+key10 TECHNOLOGY eleven e false -4.789999961853027
+key15 TECHNOLOGY sixteen e false -10.979999542236328
+key12 TECHNOLOGY NULL NULL NULL -6.619999885559082
+key13 TECHNOLOGY fourteen NULL NULL -18.43000030517578
+key05 OFFICE SUPPLIES six NULL NULL -19.959999084472656
+key09 TECHNOLOGY ten e false -10.5600004196167
+key11 TECHNOLOGY twelve NULL NULL -10.8100004196167
+key01 FURNITURE two e false -9.3100004196167
+key06 OFFICE SUPPLIES NULL e NULL 10.930000305175781
+key16 TECHNOLOGY NULL NULL NULL -2.5999999046325684
+key04 OFFICE SUPPLIES five NULL true 12.930000305175781
+key14 TECHNOLOGY fifteen e true 6.840000152587891
+key02 OFFICE SUPPLIES three e false -12.170000076293945
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_second_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2004-07-04 16:59:56
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2004-07-04 17:00:03
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2004-07-08 16:59:57
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2004-07-12 17:00:02
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2004-07-23 17:00:03
+2004-07-24 17:00:00.0 US/Pacific -8 key11 2004-07-24 16:59:52
+2004-07-25 17:00:00.0 US/Pacific -6 key01 2004-07-25 16:59:54
+2004-07-27 17:00:00.0 US/Pacific -9 key16 2004-07-27 16:59:51
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((12 * YEAR(Calcs.`__time`) + MONTH(Calcs.`__time`)) - (12 * YEAR('2004-07-04') + MONTH('2004-07-04')) AS BIGINT) AS sum_z_datediff_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 ASC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key02 0.0
+key03 8.510000228881836
+key04 6.460000038146973
+key05 8.979999542236328
+key08 0.0
+key10 6.800000190734863
+key11 3.7899999618530273
+key12 0.0
+key14 0.0
+key16 7.869999885559082
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LN(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_ln_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 NULL
+key08 10.0 2.302585092994046
+key00 12.300000190734863 2.509599277885271
+key07 0.0 NULL
+key10 0.0 NULL
+key15 0.0 NULL
+key12 0.0 NULL
+key13 0.0 NULL
+key05 -3.5 NULL
+key09 0.0 NULL
+key11 0.0 NULL
+key01 -12.300000190734863 NULL
+key06 0.0 NULL
+key16 0.0 NULL
+key04 3.5 1.252762968495368
+key14 0.0 NULL
+key02 15.699999809265137 2.753660700205545
+PREHOOK: query: SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)) AS none_calculation_0390402194730773_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END) AS none_calculation_2810402194531916_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_3240402194650458_ok, (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END) AS none_calculation_8020402194436198_ok, IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))) AS none_calculation_8130402194627126_ok, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss') AS none_calculation_8720402194759281_ok FROM druid_tableau.calcs Calcs WHERE (Calcs.key = 'key00' OR Calcs.key = 'key01') GROUP BY DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT)), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), CAST(15 AS INT))), (CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), IF(UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') > 0, FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss'), DATE_ADD((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15' ELSE NULL END), CAST(15 AS INT))), FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_ADD(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), CAST(15 AS INT)), 'yyyy-MM-dd') + (UNIX_TIMESTAMP((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END), 'yyyy-MM-dd HH:mm:ss') - UNIX_TIMESTAMP(TO_DATE((CASE WHEN (Calcs.key = 'key00') THEN '1997-04-01 00:00:42' WHEN NOT (Calcs.key = 'key00') THEN '1997-10-15 00:00:42' ELSE NULL END)), 'yyyy-MM-dd')), 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1997-04-16 1997-04-01 00:00:42 1997-04-16 00:00:42 1997-04-01 1997-04-16 1997-04-16 00:00:42
+1997-10-30 1997-10-15 00:00:42 1997-10-30 00:00:42 1997-10-15 1997-10-30 1997-10-30 00:00:42
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 > Calcs.num1) AS none_z_num_gt_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false -15.699999809265137 7.429999828338623
+key08 false 10.0 9.470000267028809
+key00 false 12.300000190734863 8.420000076293945
+key07 false 0.0 11.380000114440918
+key10 false 0.0 10.319999694824219
+key15 false 0.0 16.809999465942383
+key12 false 0.0 12.050000190734863
+key13 false 0.0 10.369999885559082
+key05 false -3.5 9.380000114440918
+key09 false 0.0 12.399999618530273
+key11 false 0.0 2.4700000286102295
+key01 false -12.300000190734863 6.710000038146973
+key06 false 0.0 16.420000076293945
+key16 false 0.0 7.119999885559082
+key04 false 3.5 9.050000190734863
+key14 false 0.0 7.099999904632568
+key02 false 15.699999809265137 9.779999732971191
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 > '1975-11-12') AS none_z_date_gt_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 0.5882352941176471
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, CONCAT(Calcs.str2,Calcs.str3) AS none_z_str_plus_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e e
+key08 nine NULL nine
+key00 one e onee
+key07 eight e eighte
+key10 eleven e elevene
+key15 sixteen e sixteene
+key12 NULL NULL NULL
+key13 fourteen NULL fourteen
+key05 six NULL six
+key09 ten e tene
+key11 twelve NULL twelve
+key01 two e twoe
+key06 NULL e e
+key16 NULL NULL NULL
+key04 five NULL five
+key14 fifteen e fifteene
+key02 three e threee
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00') AS none_z_datetrunc_year_ok, YEAR(Calcs.`__time`) AS none_z_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-01-01 00:00:00 2004
+2004-07-04 17:00:00.0 US/Pacific key08 2004-01-01 00:00:00 2004
+2004-07-08 17:00:00.0 US/Pacific key00 2004-01-01 00:00:00 2004
+2004-07-12 17:00:00.0 US/Pacific key07 2004-01-01 00:00:00 2004
+2004-07-13 17:00:00.0 US/Pacific key10 2004-01-01 00:00:00 2004
+2004-07-13 17:00:00.0 US/Pacific key15 2004-01-01 00:00:00 2004
+2004-07-16 17:00:00.0 US/Pacific key12 2004-01-01 00:00:00 2004
+2004-07-19 17:00:00.0 US/Pacific key13 2004-01-01 00:00:00 2004
+2004-07-21 17:00:00.0 US/Pacific key05 2004-01-01 00:00:00 2004
+2004-07-23 17:00:00.0 US/Pacific key09 2004-01-01 00:00:00 2004
+2004-07-24 17:00:00.0 US/Pacific key11 2004-01-01 00:00:00 2004
+2004-07-25 17:00:00.0 US/Pacific key01 2004-01-01 00:00:00 2004
+2004-07-27 17:00:00.0 US/Pacific key06 2004-01-01 00:00:00 2004
+2004-07-27 17:00:00.0 US/Pacific key16 2004-01-01 00:00:00 2004
+2004-07-28 17:00:00.0 US/Pacific key04 2004-01-01 00:00:00 2004
+2004-07-30 17:00:00.0 US/Pacific key14 2004-01-01 00:00:00 2004
+2004-08-01 17:00:00.0 US/Pacific key02 2004-01-01 00:00:00 2004
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 1
+key08 10.0 1
+key00 12.300000190734863 1
+key07 0.0 1
+key10 0.0 1
+key15 0.0 1
+key12 0.0 1
+key13 0.0 1
+key05 -3.5 1
+key09 0.0 1
+key11 0.0 1
+key01 -12.300000190734863 1
+key06 0.0 1
+key16 0.0 1
+key04 3.5 1
+key14 0.0 1
+key02 15.699999809265137 1
+PREHOOK: query: SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS ctd_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_countd_date3__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0 NULL key03 0
+1 1983-05-22 key08 1
+1 1986-03-20 key00 1
+0 NULL key07 0
+1 1999-08-20 key10 1
+0 NULL key15 0
+0 NULL key12 0
+1 1996-05-13 key13 1
+1 1979-04-01 key05 1
+0 NULL key09 0
+0 NULL key11 0
+0 NULL key01 0
+0 NULL key06 0
+1 1992-01-18 key16 1
+1 1996-03-07 key04 1
+1 1986-11-08 key14 1
+1 1997-02-02 key02 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, EXP((0.10000000000000001 * Calcs.num0)) AS sum_z_exp_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 0.20804518632516744
+key08 10.0 2.718281828459045
+key00 12.300000190734863 3.4212296015444488
+key07 0.0 1.0
+key10 0.0 1.0
+key15 0.0 1.0
+key12 0.0 1.0
+key13 0.0 1.0
+key05 -3.5 0.7046880897187134
+key09 0.0 1.0
+key11 0.0 1.0
+key01 -12.300000190734863 0.292292572105821
+key06 0.0 1.0
+key16 0.0 1.0
+key04 3.5 1.4190675485932573
+key14 0.0 1.0
+key02 15.699999809265137 4.80664810209564
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.num2 AS none_num2__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.num1 ELSE Calcs.num2 END)) AS sum_z_if_cmp_num_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.num2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 17.860000610351562 12.300000190734863
+key01 -12.300000190734863 6.710000038146973 16.729999542236328 6.710000038146973
+key02 15.699999809265137 9.779999732971191 0.0 15.699999809265137
+key03 -15.699999809265137 7.429999828338623 8.510000228881836 7.429999828338623
+key04 3.5 9.050000190734863 6.460000038146973 9.050000190734863
+key05 -3.5 9.380000114440918 8.979999542236328 9.380000114440918
+key06 0.0 16.420000076293945 11.6899995803833 16.420000076293945
+key07 0.0 11.380000114440918 17.25 11.380000114440918
+key08 10.0 9.470000267028809 0.0 10.0
+key09 0.0 12.399999618530273 11.5 12.399999618530273
+key10 0.0 10.319999694824219 6.800000190734863 10.319999694824219
+key11 0.0 2.4700000286102295 3.7899999618530273 2.4700000286102295
+key12 0.0 12.050000190734863 0.0 12.050000190734863
+key13 0.0 10.369999885559082 13.039999961853027 10.369999885559082
+key14 0.0 7.099999904632568 0.0 7.099999904632568
+key15 0.0 16.809999465942383 10.979999542236328 16.809999465942383
+key16 0.0 7.119999885559082 7.869999885559082 7.119999885559082
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) - YEAR('2004-07-04') AS BIGINT) AS sum_z_datediff_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 >= (CASE WHEN (Calcs.num3 > 0) THEN LOWER(Calcs.str0) WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ge_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 OFFICE SUPPLIES NULL e NULL -7.25
+key08 TECHNOLOGY nine NULL NULL -13.380000114440918
+key00 FURNITURE one e true -11.520000457763672
+key07 OFFICE SUPPLIES eight e false 3.640000104904175
+key10 TECHNOLOGY eleven e true -4.789999961853027
+key15 TECHNOLOGY sixteen e true -10.979999542236328
+key12 TECHNOLOGY NULL NULL NULL -6.619999885559082
+key13 TECHNOLOGY fourteen NULL NULL -18.43000030517578
+key05 OFFICE SUPPLIES six NULL NULL -19.959999084472656
+key09 TECHNOLOGY ten e true -10.5600004196167
+key11 TECHNOLOGY twelve NULL NULL -10.8100004196167
+key01 FURNITURE two e true -9.3100004196167
+key06 OFFICE SUPPLIES NULL e NULL 10.930000305175781
+key16 TECHNOLOGY NULL NULL NULL -2.5999999046325684
+key04 OFFICE SUPPLIES five NULL false 12.930000305175781
+key14 TECHNOLOGY fifteen e false 6.840000152587891
+key02 OFFICE SUPPLIES three e true -12.170000076293945
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, (1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.`__time`), '1995-01-01'), 7)) + 7), 7))) AS sum_z_datepart_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 1
+2004-07-04 17:00:00.0 US/Pacific key08 1
+2004-07-08 17:00:00.0 US/Pacific key00 5
+2004-07-12 17:00:00.0 US/Pacific key07 2
+2004-07-13 17:00:00.0 US/Pacific key10 3
+2004-07-13 17:00:00.0 US/Pacific key15 3
+2004-07-16 17:00:00.0 US/Pacific key12 6
+2004-07-19 17:00:00.0 US/Pacific key13 2
+2004-07-21 17:00:00.0 US/Pacific key05 4
+2004-07-23 17:00:00.0 US/Pacific key09 6
+2004-07-24 17:00:00.0 US/Pacific key11 7
+2004-07-25 17:00:00.0 US/Pacific key01 1
+2004-07-27 17:00:00.0 US/Pacific key06 3
+2004-07-27 17:00:00.0 US/Pacific key16 3
+2004-07-28 17:00:00.0 US/Pacific key04 4
+2004-07-30 17:00:00.0 US/Pacific key14 6
+2004-08-01 17:00:00.0 US/Pacific key02 1
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) - YEAR(TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP(), 'yyyy-MM-dd HH:mm:ss'))) AS BIGINT) AS sum_z_today_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0
+key08 0
+key00 0
+key07 0
+key10 0
+key15 0
+key12 0
+key13 0
+key05 0
+key09 0
+key11 0
+key01 0
+key06 0
+key16 0
+key04 0
+key14 0
+key02 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, MIN(Calcs.int0) AS min_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 1
+key01 0
+key02 0
+key03 0
+key04 7
+key05 3
+key06 8
+key07 0
+key08 0
+key09 8
+key10 4
+key11 10
+key12 0
+key13 4
+key14 11
+key15 4
+key16 8
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DAY(Calcs.`__time`) AS none_z_day_ok, DAY(Calcs.`__time`) AS sum_z_datepart_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 4 4
+2004-07-04 17:00:00.0 US/Pacific key08 4 4
+2004-07-08 17:00:00.0 US/Pacific key00 8 8
+2004-07-12 17:00:00.0 US/Pacific key07 12 12
+2004-07-13 17:00:00.0 US/Pacific key10 13 13
+2004-07-13 17:00:00.0 US/Pacific key15 13 13
+2004-07-16 17:00:00.0 US/Pacific key12 16 16
+2004-07-19 17:00:00.0 US/Pacific key13 19 19
+2004-07-21 17:00:00.0 US/Pacific key05 21 21
+2004-07-23 17:00:00.0 US/Pacific key09 23 23
+2004-07-24 17:00:00.0 US/Pacific key11 24 24
+2004-07-25 17:00:00.0 US/Pacific key01 25 25
+2004-07-27 17:00:00.0 US/Pacific key06 27 27
+2004-07-27 17:00:00.0 US/Pacific key16 27 27
+2004-07-28 17:00:00.0 US/Pacific key04 28 28
+2004-07-30 17:00:00.0 US/Pacific key14 30 30
+2004-08-01 17:00:00.0 US/Pacific key02 1 1
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 920.059997406006 7.333154543356814
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq2_num_nk, (Calcs.num0 = ABS(Calcs.num0)) AS none_z_num_eq_num_nk, ABS(Calcs.num0) AS sum_abs_num0__ok, Calcs.num0 AS sum_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false false 15.699999809265137 -15.699999809265137
+key08 false false 10.0 10.0
+key00 false false 12.300000190734863 12.300000190734863
+key07 false false 0.0 0.0
+key10 false false 0.0 0.0
+key15 false false 0.0 0.0
+key12 false false 0.0 0.0
+key13 false false 0.0 0.0
+key05 false false 3.5 -3.5
+key09 false false 0.0 0.0
+key11 false false 0.0 0.0
+key01 false false 12.300000190734863 -12.300000190734863
+key06 false false 0.0 0.0
+key16 false false 0.0 0.0
+key04 false false 3.5 3.5
+key14 false false 0.0 0.0
+key02 false false 15.699999809265137 15.699999809265137
+PREHOOK: query: SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS cnt_date3_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date3 IS NULL) THEN 0 WHEN NOT (Calcs.date3 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0 NULL key03 0
+1 1983-05-22 key08 1
+1 1986-03-20 key00 1
+0 NULL key07 0
+1 1999-08-20 key10 1
+0 NULL key15 0
+0 NULL key12 0
+1 1996-05-13 key13 1
+1 1979-04-01 key05 1
+0 NULL key09 0
+0 NULL key11 0
+0 NULL key01 0
+0 NULL key06 0
+1 1992-01-18 key16 1
+1 1996-03-07 key04 1
+1 1986-11-08 key14 1
+1 1997-02-02 key02 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 WHEN NOT (Calcs.str0 > Calcs.str1) THEN Calcs.str3 ELSE NULL END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 FURNITURE CLAMP ON LAMPS one e one
+key01 FURNITURE CLOCKS two e two
+key02 OFFICE SUPPLIES AIR PURIFIERS three e three
+key03 OFFICE SUPPLIES BINDER ACCESSORIES NULL e NULL
+key04 OFFICE SUPPLIES BINDER CLIPS five NULL five
+key05 OFFICE SUPPLIES BINDING MACHINES six NULL six
+key06 OFFICE SUPPLIES BINDING SUPPLIES NULL e NULL
+key07 OFFICE SUPPLIES BUSINESS ENVELOPES eight e eight
+key08 TECHNOLOGY ANSWERING MACHINES nine NULL nine
+key09 TECHNOLOGY BUSINESS COPIERS ten e ten
+key10 TECHNOLOGY CD-R MEDIA eleven e eleven
+key11 TECHNOLOGY CONFERENCE PHONES twelve NULL twelve
+key12 TECHNOLOGY CORDED KEYBOARDS NULL NULL NULL
+key13 TECHNOLOGY CORDLESS KEYBOARDS fourteen NULL fourteen
+key14 TECHNOLOGY DOT MATRIX PRINTERS fifteen e fifteen
+key15 TECHNOLOGY DVD sixteen e sixteen
+key16 TECHNOLOGY ERICSSON NULL NULL NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, Calcs.str2 RLIKE CONCAT('.*', Calcs.str3, '.*') AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e NULL
+key08 nine NULL NULL
+key00 one e true
+key07 eight e true
+key10 eleven e true
+key15 sixteen e true
+key12 NULL NULL NULL
+key13 fourteen NULL NULL
+key05 six NULL NULL
+key09 ten e true
+key11 twelve NULL NULL
+key01 two e false
+key06 NULL e NULL
+key16 NULL NULL NULL
+key04 five NULL NULL
+key14 fifteen e true
+key02 three e true
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(DISTINCT Calcs.num0) AS ctd_num0_ok, COUNT(DISTINCT Calcs.num0) AS usr_z_countd_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 1 1
+key01 1 1
+key02 1 1
+key03 1 1
+key04 1 1
+key05 1 1
+key06 1 1
+key07 1 1
+key08 1 1
+key09 1 1
+key10 1 1
+key11 1 1
+key12 1 1
+key13 1 1
+key14 1 1
+key15 1 1
+key16 1 1
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 4
+2004-07-12 17:00:00.0 US/Pacific key07 8
+2004-07-13 17:00:00.0 US/Pacific key10 9
+2004-07-13 17:00:00.0 US/Pacific key15 9
+2004-07-16 17:00:00.0 US/Pacific key12 12
+2004-07-19 17:00:00.0 US/Pacific key13 15
+2004-07-21 17:00:00.0 US/Pacific key05 17
+2004-07-23 17:00:00.0 US/Pacific key09 19
+2004-07-24 17:00:00.0 US/Pacific key11 20
+2004-07-25 17:00:00.0 US/Pacific key01 21
+2004-07-27 17:00:00.0 US/Pacific key06 23
+2004-07-27 17:00:00.0 US/Pacific key16 23
+2004-07-28 17:00:00.0 US/Pacific key04 24
+2004-07-30 17:00:00.0 US/Pacific key14 26
+2004-08-01 17:00:00.0 US/Pacific key02 28
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num4 AS sum_num4_ok, ROUND(Calcs.num4,1) AS sum_z_round_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -6.050000190734863 -6.1
+key08 4.769999980926514 4.8
+key00 0.0 0.0
+key07 -10.239999771118164 -10.2
+key10 19.389999389648438 19.4
+key15 6.75 6.8
+key12 3.380000114440918 3.4
+key13 0.0 0.0
+key05 10.710000038146973 10.7
+key09 0.0 0.0
+key11 3.819999933242798 3.8
+key01 10.850000381469727 10.9
+key06 0.0 0.0
+key16 0.0 0.0
+key04 8.319999694824219 8.3
+key14 -14.210000038146973 -14.2
+key02 -13.470000267028809 -13.5
+PREHOOK: query: SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date2 AS none_date2_ok, Calcs.date3 AS none_date3_ok, Calcs.key AS none_key_nk, CAST((4 * YEAR(Calcs.date3) + CAST((MONTH(Calcs.date3) - 1) / 3 + 1 AS BIGINT)) - (4 * YEAR(Calcs.date2) + CAST((MONTH(Calcs.date2) - 1) / 3 + 1 AS BIGINT)) AS BIGINT) AS sum_z_datediff_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1980-07-26 NULL key03 0
+1976-09-09 1983-05-22 key08 0
+1977-04-20 1986-03-20 key00 0
+1974-05-03 NULL key07 0
+1974-03-17 1999-08-20 key10 0
+1995-06-04 NULL key15 0
+2001-02-04 NULL key12 0
+1988-01-05 1996-05-13 key13 0
+1980-11-07 1979-04-01 key05 0
+1998-08-12 NULL key09 0
+1994-04-20 NULL key11 0
+1995-09-03 NULL key01 0
+1977-02-08 NULL key06 0
+2002-04-27 1992-01-18 key16 0
+1997-05-30 1996-03-07 key04 0
+1972-07-12 1986-11-08 key14 0
+1997-09-19 1997-02-02 key02 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.num4 AS STRING) AS none_z_str_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -6.050000190734863 -6.050000190734863
+key08 4.769999980926514 4.769999980926514
+key00 0.0 0.0
+key07 -10.239999771118164 -10.239999771118164
+key10 19.389999389648438 19.389999389648438
+key15 6.75 6.75
+key12 3.380000114440918 3.380000114440918
+key13 0.0 0.0
+key05 10.710000038146973 10.710000038146973
+key09 0.0 0.0
+key11 3.819999933242798 3.819999933242798
+key01 10.850000381469727 10.850000381469727
+key06 0.0 0.0
+key16 0.0 0.0
+key04 8.319999694824219 8.319999694824219
+key14 -14.210000038146973 -14.210000038146973
+key02 -13.470000267028809 -13.470000267028809
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) < Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_min_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 office supplies NULL NULL
+key08 technology nine nine
+key00 furniture one furniture
+key07 office supplies eight eight
+key10 technology eleven eleven
+key15 technology sixteen sixteen
+key12 technology NULL NULL
+key13 technology fourteen fourteen
+key05 office supplies six office supplies
+key09 technology ten technology
+key11 technology twelve technology
+key01 furniture two furniture
+key06 office supplies NULL NULL
+key16 technology NULL NULL
+key04 office supplies five five
+key14 technology fifteen fifteen
+key02 office supplies three office supplies
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, (CASE WHEN Calcs.int0 < 0 AND FLOOR(0.5) <> 0.5 THEN NULL ELSE POW(Calcs.int0,0.5) END) AS sum_z_power_int_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0 0.0
+key08 0 0.0
+key00 1 1.0
+key07 0 0.0
+key10 4 2.0
+key15 4 2.0
+key12 0 0.0
+key13 4 2.0
+key05 3 1.7320508075688772
+key09 8 2.8284271247461903
+key11 10 3.1622776601683795
+key01 0 0.0
+key06 8 2.8284271247461903
+key16 8 2.8284271247461903
+key04 7 2.6457513110645907
+key14 11 3.3166247903554
+key02 0 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_z_len_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL 0
+key08 nine 4
+key00 one 3
+key07 eight 5
+key10 eleven 6
+key15 sixteen 7
+key12 NULL 0
+key13 fourteen 8
+key05 six 3
+key09 ten 3
+key11 twelve 6
+key01 two 3
+key06 NULL 0
+key16 NULL 0
+key04 five 4
+key14 fifteen 7
+key02 three 5
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ROUND(Calcs.num0) AS sum_z_round_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 -16.0
+key08 10.0 10.0
+key00 12.300000190734863 12.0
+key07 0.0 0.0
+key10 0.0 0.0
+key15 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key05 -3.5 -4.0
+key09 0.0 0.0
+key11 0.0 0.0
+key01 -12.300000190734863 -12.0
+key06 0.0 0.0
+key16 0.0 0.0
+key04 3.5 4.0
+key14 0.0 0.0
+key02 15.699999809265137 16.0
+PREHOOK: query: SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2) AS daydiffs1__bin_, FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3) AS daydiffs2__bin_, FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4) AS daydiffs3__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2) AS yeardiffs1__bin_, FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3) AS yeardiffs2__bin_, FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4) AS yeardiffs3__bin_ FROM druid_tableau.calcs Calcs GROUP BY FLOOR(DATEDIFF(Calcs.date0, Calcs.`__time`) / 2), FLOOR(DATEDIFF(Calcs.`__time`, Calcs.date0) / 3), FLOOR(DATEDIFF(Calcs.date0, Calcs.date1) / 4), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.`__time`) AS BIGINT) / 2), FLOOR(CAST(YEAR(Calcs.`__time`) - YEAR(Calcs.date0) AS BIGINT) / 3), FLOOR(CAST(YEAR(Calcs.date0) - YEAR(Calcs.date1) AS BIGINT) / 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL NULL NULL NULL NULL NULL
+-5855 3903 -2899 -16 10 -8
+-5245 3496 -2593 -15 9 -8
+-42 28 3 0 0 0
+-20 13 18 0 0 0
+-15 10 15 0 0 0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_weekday_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 4
+2004-07-12 17:00:00.0 US/Pacific key07 8
+2004-07-13 17:00:00.0 US/Pacific key10 9
+2004-07-13 17:00:00.0 US/Pacific key15 9
+2004-07-16 17:00:00.0 US/Pacific key12 12
+2004-07-19 17:00:00.0 US/Pacific key13 15
+2004-07-21 17:00:00.0 US/Pacific key05 17
+2004-07-23 17:00:00.0 US/Pacific key09 19
+2004-07-24 17:00:00.0 US/Pacific key11 20
+2004-07-25 17:00:00.0 US/Pacific key01 21
+2004-07-27 17:00:00.0 US/Pacific key06 23
+2004-07-27 17:00:00.0 US/Pacific key16 23
+2004-07-28 17:00:00.0 US/Pacific key04 24
+2004-07-30 17:00:00.0 US/Pacific key14 26
+2004-08-01 17:00:00.0 US/Pacific key02 28
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (1 IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(Calcs.str2,CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e NULL
+key08 nine NULL NULL
+key00 one e false
+key07 eight e true
+key10 eleven e true
+key15 sixteen e false
+key12 NULL NULL NULL
+key13 fourteen NULL NULL
+key05 six NULL NULL
+key09 ten e false
+key11 twelve NULL NULL
+key01 two e false
+key06 NULL e NULL
+key16 NULL NULL NULL
+key04 five NULL NULL
+key14 fifteen e false
+key02 three e false
+PREHOOK: query: SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT MAX(Calcs.str2) AS temp_z_max_str2___3598104523__0_, MAX(LENGTH(Calcs.str2)) AS max_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+two 8
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS STRING) AS none_z_datename_quarter_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 3
+2004-07-04 17:00:00.0 US/Pacific key08 3
+2004-07-08 17:00:00.0 US/Pacific key00 3
+2004-07-12 17:00:00.0 US/Pacific key07 3
+2004-07-13 17:00:00.0 US/Pacific key10 3
+2004-07-13 17:00:00.0 US/Pacific key15 3
+2004-07-16 17:00:00.0 US/Pacific key12 3
+2004-07-19 17:00:00.0 US/Pacific key13 3
+2004-07-21 17:00:00.0 US/Pacific key05 3
+2004-07-23 17:00:00.0 US/Pacific key09 3
+2004-07-24 17:00:00.0 US/Pacific key11 3
+2004-07-25 17:00:00.0 US/Pacific key01 3
+2004-07-27 17:00:00.0 US/Pacific key06 3
+2004-07-27 17:00:00.0 US/Pacific key16 3
+2004-07-28 17:00:00.0 US/Pacific key04 3
+2004-07-30 17:00:00.0 US/Pacific key14 3
+2004-08-01 17:00:00.0 US/Pacific key02 3
+PREHOOK: query: SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+1969
+PREHOOK: query: SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END) AS str2__group_, AVG(Calcs.num0) AS avg_num0_ok, COUNT(Calcs.num0) AS cnt_num0_ok, SUM(Calcs.num0) AS sum_num0_ok FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'fifteen' OR Calcs.str2 = 'five' OR Calcs.str2 = 'fourteen') THEN 'fifteen' WHEN (Calcs.str2 = 'nine') THEN 'nine' WHEN (Calcs.str2 = 'one') THEN 'one' WHEN (Calcs.str2 = 'six' OR Calcs.str2 = 'sixteen') THEN 'six' WHEN (Calcs.str2 = 'ten' OR Calcs.str2 = 'three' OR Calcs.str2 = 'twelve' OR Calcs.str2 = 'two') THEN 'ten' ELSE NULL END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL -3.924999952316284 4 -15.699999809265137
+eight 0.0 2 0.0
+fifteen 1.1666666666666667 3 3.5
+nine 10.0 1 10.0
+one 12.300000190734863 1 12.300000190734863
+six -1.75 2 -3.5
+ten 0.8499999046325684 4 3.3999996185302734
+PREHOOK: query: SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM((((((((((CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT)) + CAST((POW(CAST(2 AS DOUBLE),31) - 1) AS BIGINT))) AS sum_maxint_sum_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+365072219990
+PREHOOK: query: SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num1 AS none_num1_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+false false 2.4700000286102295
+false false 7.099999904632568
+false false 7.119999885559082
+false false 7.429999828338623
+false false 9.050000190734863
+false false 9.380000114440918
+false false 9.470000267028809
+false false 9.779999732971191
+false false 6.710000038146973
+false false 8.420000076293945
+false false 10.319999694824219
+false false 12.050000190734863
+false false 10.369999885559082
+false false 11.380000114440918
+false false 12.399999618530273
+false false 16.420000076293945
+false false 16.809999465942383
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 = (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_eq_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e NULL -7.25
+key08 nine NULL NULL -13.380000114440918
+key00 one e false -11.520000457763672
+key07 eight e true 3.640000104904175
+key10 eleven e false -4.789999961853027
+key15 sixteen e false -10.979999542236328
+key12 NULL NULL NULL -6.619999885559082
+key13 fourteen NULL NULL -18.43000030517578
+key05 six NULL NULL -19.959999084472656
+key09 ten e false -10.5600004196167
+key11 twelve NULL NULL -10.8100004196167
+key01 two e false -9.3100004196167
+key06 NULL e NULL 10.930000305175781
+key16 NULL NULL NULL -2.5999999046325684
+key04 five NULL true 12.930000305175781
+key14 fifteen e true 6.840000152587891
+key02 three e false -12.170000076293945
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.date2 AS none_date2_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END) AS none_z_if_cmp_date_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.date2, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE Calcs.date2 END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 2004-04-06 1980-11-07 key05 -3.5 9.380000114440918 2004-04-06
+NULL 2004-04-07 1977-02-08 key06 0.0 16.420000076293945 2004-04-07
+NULL 2004-04-08 1974-05-03 key07 0.0 11.380000114440918 2004-04-08
+NULL 2004-04-09 1976-09-09 key08 10.0 9.470000267028809 NULL
+NULL 2004-04-10 1998-08-12 key09 0.0 12.399999618530273 2004-04-10
+NULL 2004-04-11 1974-03-17 key10 0.0 10.319999694824219 2004-04-11
+NULL 2004-04-12 1994-04-20 key11 0.0 2.4700000286102295 2004-04-12
+NULL 2004-04-13 2001-02-04 key12 0.0 12.050000190734863 2004-04-13
+NULL 2004-04-14 1988-01-05 key13 0.0 10.369999885559082 2004-04-14
+NULL 2004-04-15 1972-07-12 key14 0.0 7.099999904632568 2004-04-15
+NULL 2004-04-16 1995-06-04 key15 0.0 16.809999465942383 2004-04-16
+NULL 2004-04-17 2002-04-27 key16 0.0 7.119999885559082 2004-04-17
+1972-07-04 2004-04-02 1995-09-03 key01 -12.300000190734863 6.710000038146973 2004-04-02
+1975-11-12 2004-04-03 1997-09-19 key02 15.699999809265137 9.779999732971191 1975-11-12
+2004-04-15 2004-04-01 1977-04-20 key00 12.300000190734863 8.420000076293945 2004-04-15
+2004-06-04 2004-04-04 1980-07-26 key03 -15.699999809265137 7.429999828338623 2004-04-04
+2004-06-19 2004-04-05 1997-05-30 key04 3.5 9.050000190734863 2004-04-05
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) / LOG10(2) ELSE NULL END) AS sum_z_log2_num_ok, (CASE WHEN Calcs.num0 > 0 THEN LOG10(Calcs.num0) ELSE CAST(NULL AS DOUBLE) END) AS sum_z_log_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 NULL NULL
+key08 10.0 3.321928094887362 1.0
+key00 12.300000190734863 3.6205864328236044 1.0899051181739587
+key07 0.0 NULL NULL
+key10 0.0 NULL NULL
+key15 0.0 NULL NULL
+key12 0.0 NULL NULL
+key13 0.0 NULL NULL
+key05 -3.5 NULL NULL
+key09 0.0 NULL NULL
+key11 0.0 NULL NULL
+key01 -12.300000190734863 NULL NULL
+key06 0.0 NULL NULL
+key16 0.0 NULL NULL
+key04 3.5 1.8073549220576042 0.5440680443502757
+key14 0.0 NULL NULL
+key02 15.699999809265137 3.97269263647737 1.1958996471331127
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, ABS(Calcs.num0) AS sum_z_abs_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 15.699999809265137
+key08 10.0 10.0
+key00 12.300000190734863 12.300000190734863
+key07 0.0 0.0
+key10 0.0 0.0
+key15 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key05 -3.5 3.5
+key09 0.0 0.0
+key11 0.0 0.0
+key01 -12.300000190734863 12.300000190734863
+key06 0.0 0.0
+key16 0.0 0.0
+key04 3.5 3.5
+key14 0.0 0.0
+key02 15.699999809265137 15.699999809265137
+PREHOOK: query: SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_z_stdev_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_stdev_num0___2730138885__0_, (Calcs.num0 * Calcs.num0) AS temp_z_stdev_num0___4071133194__0_, Calcs.key AS none_key_nk, (CASE WHEN false THEN CAST(0. AS DOUBLE) WHEN NOT false THEN CAST(NULL AS DOUBLE) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 246.48999401092533 key03 NULL
+10.0 1 100.0 key08 NULL
+12.300000190734863 1 151.29000469207767 key00 NULL
+0.0 1 0.0 key07 NULL
+0.0 1 0.0 key10 NULL
+0.0 1 0.0 key15 NULL
+0.0 1 0.0 key12 NULL
+0.0 1 0.0 key13 NULL
+-3.5 1 12.25 key05 NULL
+0.0 1 0.0 key09 NULL
+0.0 1 0.0 key11 NULL
+-12.300000190734863 1 151.29000469207767 key01 NULL
+0.0 1 0.0 key06 NULL
+0.0 1 0.0 key16 NULL
+3.5 1 12.25 key04 NULL
+0.0 1 0.0 key14 NULL
+15.699999809265137 1 246.48999401092533 key02 NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0))
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0)) AS none_z_case_cmp_str_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1, Calcs.str0, Calcs.str2, Calcs.str3, IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN true THEN 1 WHEN NOT true THEN 0 ELSE NULL END)),Calcs.str2,IF(((CASE WHEN (Calcs.num0 > Calcs.num1) THEN 1 WHEN NOT (Calcs.num0 > Calcs.num1) THEN 0 ELSE NULL END) = (CASE WHEN false THEN 1 WHEN NOT false THEN 0 ELSE NULL END)),Calcs.str3,Calcs.str0))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 FURNITURE one e one
+key01 -12.300000190734863 6.710000038146973 FURNITURE two e e
+key02 15.699999809265137 9.779999732971191 OFFICE SUPPLIES three e three
+key03 -15.699999809265137 7.429999828338623 OFFICE SUPPLIES NULL e e
+key04 3.5 9.050000190734863 OFFICE SUPPLIES five NULL NULL
+key05 -3.5 9.380000114440918 OFFICE SUPPLIES six NULL NULL
+key06 0.0 16.420000076293945 OFFICE SUPPLIES NULL e e
+key07 0.0 11.380000114440918 OFFICE SUPPLIES eight e e
+key08 10.0 9.470000267028809 TECHNOLOGY nine NULL nine
+key09 0.0 12.399999618530273 TECHNOLOGY ten e e
+key10 0.0 10.319999694824219 TECHNOLOGY eleven e e
+key11 0.0 2.4700000286102295 TECHNOLOGY twelve NULL NULL
+key12 0.0 12.050000190734863 TECHNOLOGY NULL NULL NULL
+key13 0.0 10.369999885559082 TECHNOLOGY fourteen NULL NULL
+key14 0.0 7.099999904632568 TECHNOLOGY fifteen e e
+key15 0.0 16.809999465942383 TECHNOLOGY sixteen e e
+key16 0.0 7.119999885559082 TECHNOLOGY NULL NULL NULL
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS STRING) AS none_z_datename_dayofyear_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 186
+2004-07-04 17:00:00.0 US/Pacific key08 186
+2004-07-08 17:00:00.0 US/Pacific key00 190
+2004-07-12 17:00:00.0 US/Pacific key07 194
+2004-07-13 17:00:00.0 US/Pacific key10 195
+2004-07-13 17:00:00.0 US/Pacific key15 195
+2004-07-16 17:00:00.0 US/Pacific key12 198
+2004-07-19 17:00:00.0 US/Pacific key13 201
+2004-07-21 17:00:00.0 US/Pacific key05 203
+2004-07-23 17:00:00.0 US/Pacific key09 205
+2004-07-24 17:00:00.0 US/Pacific key11 206
+2004-07-25 17:00:00.0 US/Pacific key01 207
+2004-07-27 17:00:00.0 US/Pacific key06 209
+2004-07-27 17:00:00.0 US/Pacific key16 209
+2004-07-28 17:00:00.0 US/Pacific key04 210
+2004-07-30 17:00:00.0 US/Pacific key14 212
+2004-08-01 17:00:00.0 US/Pacific key02 214
+PREHOOK: query: SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT COUNT(Calcs.date3) AS cnt_date3_ok, COUNT(Calcs.date3) AS usr_z_count_date3__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9 9
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num4 IS NULL) AS none_z_isnull_num_nk, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false -6.050000190734863
+key08 false 4.769999980926514
+key00 false 0.0
+key07 false -10.239999771118164
+key10 false 19.389999389648438
+key15 false 6.75
+key12 false 3.380000114440918
+key13 false 0.0
+key05 false 10.710000038146973
+key09 false 0.0
+key11 false 3.819999933242798
+key01 false 10.850000381469727
+key06 false 0.0
+key16 false 0.0
+key04 false 8.319999694824219
+key14 false -14.210000038146973
+key02 false -13.470000267028809
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS sum_z_float_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0 -6.050000190734863 -6.05
+key08 0 4.769999980926514 4.77
+key00 1 0.0 0.01
+key07 0 -10.239999771118164 -10.24
+key10 4 19.389999389648438 19.394
+key15 4 6.75 6.754
+key12 0 3.380000114440918 3.38
+key13 4 0.0 0.04
+key05 3 10.710000038146973 10.713
+key09 8 0.0 0.08
+key11 10 3.819999933242798 3.821
+key01 0 10.850000381469727 10.85
+key06 8 0.0 0.08
+key16 8 0.0 0.08
+key04 7 8.319999694824219 8.327
+key14 11 -14.210000038146973 -14.2111
+key02 0 -13.470000267028809 -13.47
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(SECOND(Calcs.`__time`), 0), 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, floor((datediff(Calcs.`__time`,'1995-01-01') - ( pmod(datediff(Calcs.`__time`, '1995-01-01'), 7) + 1) - datediff('2004-07-04','1995-01-01') + (pmod(datediff('2004-07-04', '1995-01-01'), 7) + 1))/7) AS sum_z_datediff_week_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 1
+2004-07-13 17:00:00.0 US/Pacific key10 1
+2004-07-13 17:00:00.0 US/Pacific key15 1
+2004-07-16 17:00:00.0 US/Pacific key12 1
+2004-07-19 17:00:00.0 US/Pacific key13 2
+2004-07-21 17:00:00.0 US/Pacific key05 2
+2004-07-23 17:00:00.0 US/Pacific key09 2
+2004-07-24 17:00:00.0 US/Pacific key11 2
+2004-07-25 17:00:00.0 US/Pacific key01 3
+2004-07-27 17:00:00.0 US/Pacific key06 3
+2004-07-27 17:00:00.0 US/Pacific key16 3
+2004-07-28 17:00:00.0 US/Pacific key04 3
+2004-07-30 17:00:00.0 US/Pacific key14 3
+2004-08-01 17:00:00.0 US/Pacific key02 4
+PREHOOK: query: SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT YEAR(Calcs.date0) AS yr_date0_ok FROM druid_tableau.calcs Calcs WHERE (YEAR(Calcs.date0) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, COALESCE(Calcs.str2, 'i\'m null') AS none_z_ifnull_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL i'm null
+key08 nine nine
+key00 one one
+key07 eight eight
+key10 eleven eleven
+key15 sixteen sixteen
+key12 NULL i'm null
+key13 fourteen fourteen
+key05 six six
+key09 ten ten
+key11 twelve twelve
+key01 two two
+key06 NULL i'm null
+key16 NULL i'm null
+key04 five five
+key14 fifteen fifteen
+key02 three three
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_month_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+Calcs.int1)/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+Calcs.int1, 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2004-03-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2004-10-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2004-04-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2004-09-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2004-10-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific -8 key11 2003-11-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific -6 key01 2004-01-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific -9 key16 2003-10-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((MONTH(Calcs.`__time`) - 1) / 3 + 1 AS BIGINT) AS sum_z_datepart_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 3
+2004-07-04 17:00:00.0 US/Pacific key08 3
+2004-07-08 17:00:00.0 US/Pacific key00 3
+2004-07-12 17:00:00.0 US/Pacific key07 3
+2004-07-13 17:00:00.0 US/Pacific key10 3
+2004-07-13 17:00:00.0 US/Pacific key15 3
+2004-07-16 17:00:00.0 US/Pacific key12 3
+2004-07-19 17:00:00.0 US/Pacific key13 3
+2004-07-21 17:00:00.0 US/Pacific key05 3
+2004-07-23 17:00:00.0 US/Pacific key09 3
+2004-07-24 17:00:00.0 US/Pacific key11 3
+2004-07-25 17:00:00.0 US/Pacific key01 3
+2004-07-27 17:00:00.0 US/Pacific key06 3
+2004-07-27 17:00:00.0 US/Pacific key16 3
+2004-07-28 17:00:00.0 US/Pacific key04 3
+2004-07-30 17:00:00.0 US/Pacific key14 3
+2004-08-01 17:00:00.0 US/Pacific key02 3
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, LOWER(Calcs.str0) AS none_lower_str0__nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (LOWER(Calcs.str0) IS NULL) OR (Calcs.str2 IS NULL) THEN NULL WHEN LOWER(Calcs.str0) > Calcs.str2 THEN LOWER(Calcs.str0) ELSE Calcs.str2 END) AS none_z_max_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 office supplies NULL NULL
+key08 technology nine technology
+key00 furniture one one
+key07 office supplies eight office supplies
+key10 technology eleven technology
+key15 technology sixteen technology
+key12 technology NULL NULL
+key13 technology fourteen technology
+key05 office supplies six six
+key09 technology ten ten
+key11 technology twelve twelve
+key01 furniture two two
+key06 office supplies NULL NULL
+key16 technology NULL NULL
+key04 office supplies five office supplies
+key14 technology fifteen technology
+key02 office supplies three three
+PREHOOK: query: SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs WHERE (NOT ((Calcs.str2 IS NULL) OR ((Calcs.str2 >= 'eight') AND (Calcs.str2 <= 'six')))) GROUP BY Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+sixteen
+ten
+three
+twelve
+two
+PREHOOK: query: SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT MONTH(Calcs.`__time`) AS mn_datetime0_ok FROM druid_tableau.calcs Calcs WHERE (MONTH(Calcs.`__time`) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, Calcs.str1 RLIKE CONCAT('.*', 'IN', '.*') AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 BINDER ACCESSORIES true
+key08 ANSWERING MACHINES true
+key00 CLAMP ON LAMPS false
+key07 BUSINESS ENVELOPES true
+key10 CD-R MEDIA false
+key15 DVD false
+key12 CORDED KEYBOARDS false
+key13 CORDLESS KEYBOARDS false
+key05 BINDING MACHINES true
+key09 BUSINESS COPIERS true
+key11 CONFERENCE PHONES false
+key01 CLOCKS false
+key06 BINDING SUPPLIES true
+key16 ERICSSON false
+key04 BINDER CLIPS true
+key14 DOT MATRIX PRINTERS true
+key02 AIR PURIFIERS false
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), CAST((-1) AS DOUBLE)) AS none_z_ifnull_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -6.050000190734863 -6.050000190734863
+key08 4.769999980926514 4.769999980926514
+key00 0.0 0.0
+key07 -10.239999771118164 -10.239999771118164
+key10 19.389999389648438 19.389999389648438
+key15 6.75 6.75
+key12 3.380000114440918 3.380000114440918
+key13 0.0 0.0
+key05 10.710000038146973 10.710000038146973
+key09 0.0 0.0
+key11 3.819999933242798 3.819999933242798
+key01 10.850000381469727 10.850000381469727
+key06 0.0 0.0
+key16 0.0 0.0
+key04 8.319999694824219 8.319999694824219
+key14 -14.210000038146973 -14.210000038146973
+key02 -13.470000267028809 -13.470000267028809
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 12.300000190734863
+key01 -12.300000190734863 -12.300000190734863
+key02 15.699999809265137 15.699999809265137
+key03 -15.699999809265137 -15.699999809265137
+key04 3.5 3.5
+key05 -3.5 -3.5
+key06 0.0 0.0
+key07 0.0 0.0
+key08 10.0 10.0
+key09 0.0 0.0
+key10 0.0 0.0
+key11 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key14 0.0 0.0
+key15 0.0 0.0
+key16 0.0 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str1 AS none_str1_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END) AS none_z_if_cmp_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str0, Calcs.str1, Calcs.str2, Calcs.str3, (CASE WHEN (Calcs.str0 > Calcs.str1) THEN Calcs.str2 ELSE Calcs.str3 END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 FURNITURE CLAMP ON LAMPS one e one
+key01 FURNITURE CLOCKS two e two
+key02 OFFICE SUPPLIES AIR PURIFIERS three e three
+key03 OFFICE SUPPLIES BINDER ACCESSORIES NULL e NULL
+key04 OFFICE SUPPLIES BINDER CLIPS five NULL five
+key05 OFFICE SUPPLIES BINDING MACHINES six NULL six
+key06 OFFICE SUPPLIES BINDING SUPPLIES NULL e NULL
+key07 OFFICE SUPPLIES BUSINESS ENVELOPES eight e eight
+key08 TECHNOLOGY ANSWERING MACHINES nine NULL nine
+key09 TECHNOLOGY BUSINESS COPIERS ten e ten
+key10 TECHNOLOGY CD-R MEDIA eleven e eleven
+key11 TECHNOLOGY CONFERENCE PHONES twelve NULL twelve
+key12 TECHNOLOGY CORDED KEYBOARDS NULL NULL NULL
+key13 TECHNOLOGY CORDLESS KEYBOARDS fourteen NULL fourteen
+key14 TECHNOLOGY DOT MATRIX PRINTERS fifteen e fifteen
+key15 TECHNOLOGY DVD sixteen e sixteen
+key16 TECHNOLOGY ERICSSON NULL NULL NULL
+PREHOOK: query: SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (32000 + Calcs.num4) AS none_bignum_ok FROM druid_tableau.calcs Calcs GROUP BY (32000 + Calcs.num4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+31985.789999961853
+31986.52999973297
+31989.760000228882
+31993.949999809265
+32000.0
+32003.38000011444
+32003.819999933243
+32004.769999980927
+32006.75
+32008.319999694824
+32010.710000038147
+32010.85000038147
+32019.38999938965
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 920.059997406006 53.77515555675468
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) IS NULL) OR (LENGTH('ES') IS NULL) THEN NULL WHEN LENGTH('ES') < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str1),CAST(1 AS INT),CAST(LENGTH('ES') AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str1),CAST((CASE WHEN (LENGTH(Calcs.str1) - LENGTH('ES')) < 0 THEN 1 ELSE LENGTH(Calcs.str1) - LENGTH('ES') + 1 END) AS INT),CAST(LENGTH('ES') AS INT)) END) = 'ES' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 BINDER ACCESSORIES true
+key08 ANSWERING MACHINES true
+key00 CLAMP ON LAMPS false
+key07 BUSINESS ENVELOPES true
+key10 CD-R MEDIA false
+key15 DVD false
+key12 CORDED KEYBOARDS false
+key13 CORDLESS KEYBOARDS false
+key05 BINDING MACHINES true
+key09 BUSINESS COPIERS false
+key11 CONFERENCE PHONES true
+key01 CLOCKS false
+key06 BINDING SUPPLIES true
+key16 ERICSSON false
+key04 BINDER CLIPS false
+key14 DOT MATRIX PRINTERS false
+key02 AIR PURIFIERS false
+PREHOOK: query: SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 -15.699999809265137
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (CASE WHEN ((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) IS NULL) OR (LENGTH(Calcs.str3) IS NULL) THEN NULL WHEN LENGTH(Calcs.str3) < 1 THEN '' WHEN (CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) < 1 THEN SUBSTRING(RTRIM(Calcs.str2),CAST(1 AS INT),CAST(LENGTH(Calcs.str3) AS INT)) ELSE SUBSTRING(RTRIM(Calcs.str2),CAST((CASE WHEN (LENGTH(Calcs.str2) - LENGTH(Calcs.str3)) < 0 THEN 1 ELSE LENGTH(Calcs.str2) - LENGTH(Calcs.str3) + 1 END) AS INT),CAST(LENGTH(Calcs.str3) AS INT)) END) = Calcs.str3 AS none_z_startswith_str_str__copy__nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e NULL
+key08 nine NULL NULL
+key00 one e true
+key07 eight e false
+key10 eleven e false
+key15 sixteen e false
+key12 NULL NULL NULL
+key13 fourteen NULL NULL
+key05 six NULL NULL
+key09 ten e false
+key11 twelve NULL NULL
+key01 two e false
+key06 NULL e NULL
+key16 NULL NULL NULL
+key04 five NULL NULL
+key14 fifteen e false
+key02 three e true
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, UPPER(Calcs.str2) AS none_z_upper_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL NULL
+key08 nine NINE
+key00 one ONE
+key07 eight EIGHT
+key10 eleven ELEVEN
+key15 sixteen SIXTEEN
+key12 NULL NULL
+key13 fourteen FOURTEEN
+key05 six SIX
+key09 ten TEN
+key11 twelve TWELVE
+key01 two TWO
+key06 NULL NULL
+key16 NULL NULL
+key04 five FIVE
+key14 fifteen FIFTEEN
+key02 three THREE
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, COALESCE(CAST(Calcs.num4 AS DOUBLE), 0.0) AS none_z_zn_num_ok, Calcs.num4 AS sum_num4_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -6.050000190734863 -6.050000190734863
+key08 4.769999980926514 4.769999980926514
+key00 0.0 0.0
+key07 -10.239999771118164 -10.239999771118164
+key10 19.389999389648438 19.389999389648438
+key15 6.75 6.75
+key12 3.380000114440918 3.380000114440918
+key13 0.0 0.0
+key05 10.710000038146973 10.710000038146973
+key09 0.0 0.0
+key11 3.819999933242798 3.819999933242798
+key01 10.850000381469727 10.850000381469727
+key06 0.0 0.0
+key16 0.0 0.0
+key04 8.319999694824219 8.319999694824219
+key14 -14.210000038146973 -14.210000038146973
+key02 -13.470000267028809 -13.470000267028809
+PREHOOK: query: SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END) AS str2__group__1, SUM(1) AS sum_number_of_records_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2, (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'eight' OR Calcs.str2 = 'eleven') THEN 'eight' WHEN (Calcs.str2 = 'sixteen' OR Calcs.str2 = 'ten') THEN 'sixteen' WHEN (Calcs.str2 = 'three' OR Calcs.str2 = 'twelve') THEN 'three' ELSE 'two' END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL NULL 4
+eight eight 1
+eleven eight 1
+fifteen two 1
+five two 1
+fourteen two 1
+nine two 1
+one two 1
+six two 1
+sixteen sixteen 1
+ten sixteen 1
+three three 1
+twelve three 1
+two two 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int2 AS sum_int2_ok, Calcs.int3 AS sum_int3_ok, CASE WHEN Calcs.int2 = 0 THEN NULL ELSE ( Calcs.int3 / Calcs.int2 ) END AS sum_z_div_int_zero_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -5 5 -1.0
+key08 -6 17 -2.8333333333333335
+key00 5 8 1.6
+key07 0 3 NULL
+key10 -3 11 -3.6666666666666665
+key15 -9 11 -1.2222222222222223
+key12 0 11 NULL
+key13 4 18 4.5
+key05 2 7 3.5
+key09 -9 2 -0.2222222222222222
+key11 -4 2 -0.5
+key01 -4 13 -3.25
+key06 9 18 2.0
+key16 6 0 0.0
+key04 3 9 3.0
+key14 -8 18 -2.25
+key02 5 2 0.4
+PREHOOK: query: SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+17 68 4.0
+PREHOOK: query: SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS min_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 key03 -15.699999809265137
+10.0 key08 10.0
+12.300000190734863 key00 12.300000190734863
+0.0 key07 0.0
+0.0 key10 0.0
+0.0 key15 0.0
+0.0 key12 0.0
+0.0 key13 0.0
+-3.5 key05 -3.5
+0.0 key09 0.0
+0.0 key11 0.0
+-12.300000190734863 key01 -12.300000190734863
+0.0 key06 0.0
+0.0 key16 0.0
+3.5 key04 3.5
+0.0 key14 0.0
+15.699999809265137 key02 15.699999809265137
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, SECOND(Calcs.`__time`) AS sum_z_datepart_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, CONCAT(YEAR(Calcs.`__time`), '-01-01 00:00:00')) + 1 AS sum_z_datepart_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 186
+2004-07-04 17:00:00.0 US/Pacific key08 186
+2004-07-08 17:00:00.0 US/Pacific key00 190
+2004-07-12 17:00:00.0 US/Pacific key07 194
+2004-07-13 17:00:00.0 US/Pacific key10 195
+2004-07-13 17:00:00.0 US/Pacific key15 195
+2004-07-16 17:00:00.0 US/Pacific key12 198
+2004-07-19 17:00:00.0 US/Pacific key13 201
+2004-07-21 17:00:00.0 US/Pacific key05 203
+2004-07-23 17:00:00.0 US/Pacific key09 205
+2004-07-24 17:00:00.0 US/Pacific key11 206
+2004-07-25 17:00:00.0 US/Pacific key01 207
+2004-07-27 17:00:00.0 US/Pacific key06 209
+2004-07-27 17:00:00.0 US/Pacific key16 209
+2004-07-28 17:00:00.0 US/Pacific key04 210
+2004-07-30 17:00:00.0 US/Pacific key14 212
+2004-08-01 17:00:00.0 US/Pacific key02 214
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 = '1972-07-04') AS none_z_date_eq_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, DATEDIFF(Calcs.`__time`, '2004-07-04') AS sum_z_datediff_day_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 4
+2004-07-12 17:00:00.0 US/Pacific key07 8
+2004-07-13 17:00:00.0 US/Pacific key10 9
+2004-07-13 17:00:00.0 US/Pacific key15 9
+2004-07-16 17:00:00.0 US/Pacific key12 12
+2004-07-19 17:00:00.0 US/Pacific key13 15
+2004-07-21 17:00:00.0 US/Pacific key05 17
+2004-07-23 17:00:00.0 US/Pacific key09 19
+2004-07-24 17:00:00.0 US/Pacific key11 20
+2004-07-25 17:00:00.0 US/Pacific key01 21
+2004-07-27 17:00:00.0 US/Pacific key06 23
+2004-07-27 17:00:00.0 US/Pacific key16 23
+2004-07-28 17:00:00.0 US/Pacific key04 24
+2004-07-30 17:00:00.0 US/Pacific key14 26
+2004-08-01 17:00:00.0 US/Pacific key02 28
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) * 60 + COALESCE(SECOND(Calcs.`__time`), 0) - COALESCE(SECOND('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 61200
+2004-07-04 17:00:00.0 US/Pacific key08 61200
+2004-07-08 17:00:00.0 US/Pacific key00 406800
+2004-07-12 17:00:00.0 US/Pacific key07 752400
+2004-07-13 17:00:00.0 US/Pacific key10 838800
+2004-07-13 17:00:00.0 US/Pacific key15 838800
+2004-07-16 17:00:00.0 US/Pacific key12 1098000
+2004-07-19 17:00:00.0 US/Pacific key13 1357200
+2004-07-21 17:00:00.0 US/Pacific key05 1530000
+2004-07-23 17:00:00.0 US/Pacific key09 1702800
+2004-07-24 17:00:00.0 US/Pacific key11 1789200
+2004-07-25 17:00:00.0 US/Pacific key01 1875600
+2004-07-27 17:00:00.0 US/Pacific key06 2048400
+2004-07-27 17:00:00.0 US/Pacific key16 2048400
+2004-07-28 17:00:00.0 US/Pacific key04 2134800
+2004-07-30 17:00:00.0 US/Pacific key14 2307600
+2004-08-01 17:00:00.0 US/Pacific key02 2480400
+PREHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING))
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING)) AS none_b11703_nk FROM druid_tableau.calcs Calcs GROUP BY CONCAT(CONCAT(CONCAT('Q',CAST(CAST((MONTH(Calcs.date0) - 1) / 3 + 1 AS BIGINT) AS STRING)),'-'),CAST(YEAR(Calcs.date0) AS STRING))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+Q4-1969
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str1 AS none_str1_nk, (CASE WHEN (1 IS NULL) OR (LENGTH('BI') IS NULL) THEN NULL WHEN LENGTH('BI') < 1 THEN '' WHEN 1 < 1 THEN SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) ELSE SUBSTRING(Calcs.str1,CAST(1 AS INT),CAST(LENGTH('BI') AS INT)) END) = 'BI' AS none_z_startswith_str_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 BINDER ACCESSORIES false
+key08 ANSWERING MACHINES false
+key00 CLAMP ON LAMPS false
+key07 BUSINESS ENVELOPES false
+key10 CD-R MEDIA false
+key15 DVD false
+key12 CORDED KEYBOARDS false
+key13 CORDLESS KEYBOARDS false
+key05 BINDING MACHINES false
+key09 BUSINESS COPIERS false
+key11 CONFERENCE PHONES false
+key01 CLOCKS false
+key06 BINDING SUPPLIES false
+key16 ERICSSON false
+key04 BINDER CLIPS false
+key14 DOT MATRIX PRINTERS false
+key02 AIR PURIFIERS false
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_minute_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*60, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2004-07-04 16:56:00
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2004-07-04 17:03:00
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2004-07-08 16:57:00
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2004-07-12 17:02:00
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2004-07-23 17:03:00
+2004-07-24 17:00:00.0 US/Pacific -8 key11 2004-07-24 16:52:00
+2004-07-25 17:00:00.0 US/Pacific -6 key01 2004-07-25 16:54:00
+2004-07-27 17:00:00.0 US/Pacific -9 key16 2004-07-27 16:51:00
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS cnt_str2_ok, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (CASE WHEN (Calcs.str2 IS NULL) THEN 0 WHEN NOT (Calcs.str2 IS NULL) THEN 1 ELSE NULL END) AS usr_z_count_str2__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0 key03 NULL 0
+1 key08 nine 1
+1 key00 one 1
+1 key07 eight 1
+1 key10 eleven 1
+1 key15 sixteen 1
+0 key12 NULL 0
+1 key13 fourteen 1
+1 key05 six 1
+1 key09 ten 1
+1 key11 twelve 1
+1 key01 two 1
+0 key06 NULL 0
+0 key16 NULL 0
+1 key04 five 1
+1 key14 fifteen 1
+1 key02 three 1
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, (CASE WHEN (Calcs.date0 IS NULL) OR (Calcs.date1 IS NULL) THEN NULL WHEN Calcs.date0 < Calcs.date1 THEN Calcs.date0 ELSE Calcs.date1 END) AS none_z_min_date_date_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 2004-04-04 key03 2004-04-04
+NULL 2004-04-09 key08 NULL
+2004-04-15 2004-04-01 key00 2004-04-01
+NULL 2004-04-08 key07 NULL
+NULL 2004-04-11 key10 NULL
+NULL 2004-04-16 key15 NULL
+NULL 2004-04-13 key12 NULL
+NULL 2004-04-14 key13 NULL
+NULL 2004-04-06 key05 NULL
+NULL 2004-04-10 key09 NULL
+NULL 2004-04-12 key11 NULL
+1972-07-04 2004-04-02 key01 1972-07-04
+NULL 2004-04-07 key06 NULL
+NULL 2004-04-17 key16 NULL
+2004-06-19 2004-04-05 key04 2004-04-05
+NULL 2004-04-15 key14 NULL
+1975-11-12 2004-04-03 key02 1975-11-12
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 >= '1975-11-12') AS none_z_date_ge_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, (Calcs.str2 IS NULL) AS none_z_isnull_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL false
+key08 nine false
+key00 one false
+key07 eight false
+key10 eleven false
+key15 sixteen false
+key12 NULL false
+key13 fourteen false
+key05 six false
+key09 ten false
+key11 twelve false
+key01 two false
+key06 NULL false
+key16 NULL false
+key04 five false
+key14 fifteen false
+key02 three false
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(YEAR(Calcs.`__time`) AS STRING) AS none_z_datename_year_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004
+2004-07-04 17:00:00.0 US/Pacific key08 2004
+2004-07-08 17:00:00.0 US/Pacific key00 2004
+2004-07-12 17:00:00.0 US/Pacific key07 2004
+2004-07-13 17:00:00.0 US/Pacific key10 2004
+2004-07-13 17:00:00.0 US/Pacific key15 2004
+2004-07-16 17:00:00.0 US/Pacific key12 2004
+2004-07-19 17:00:00.0 US/Pacific key13 2004
+2004-07-21 17:00:00.0 US/Pacific key05 2004
+2004-07-23 17:00:00.0 US/Pacific key09 2004
+2004-07-24 17:00:00.0 US/Pacific key11 2004
+2004-07-25 17:00:00.0 US/Pacific key01 2004
+2004-07-27 17:00:00.0 US/Pacific key06 2004
+2004-07-27 17:00:00.0 US/Pacific key16 2004
+2004-07-28 17:00:00.0 US/Pacific key04 2004
+2004-07-30 17:00:00.0 US/Pacific key14 2004
+2004-08-01 17:00:00.0 US/Pacific key02 2004
+PREHOOK: query: SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1) AS none_z_case_null_null_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date1, Calcs.key, IF((((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 7) OR ((1 + IF(7<0, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7),7)-7, PMOD((IF(7<0, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'),7)-7, PMOD(DATEDIFF(TO_DATE(Calcs.date1), '1995-01-01'), 7)) + 7), 7))) = 1)),NULL,Calcs.date1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-04-01 key00 2004-04-01
+2004-04-02 key01 2004-04-02
+2004-04-03 key02 NULL
+2004-04-04 key03 NULL
+2004-04-05 key04 2004-04-05
+2004-04-06 key05 2004-04-06
+2004-04-07 key06 2004-04-07
+2004-04-08 key07 2004-04-08
+2004-04-09 key08 2004-04-09
+2004-04-10 key09 NULL
+2004-04-11 key10 NULL
+2004-04-12 key11 2004-04-12
+2004-04-13 key12 2004-04-13
+2004-04-14 key13 2004-04-14
+2004-04-15 key14 2004-04-15
+2004-04-16 key15 2004-04-16
+2004-04-17 key16 NULL
+PREHOOK: query: SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS temp_z_avg_num0___1723718801__0_, (CASE WHEN (Calcs.num0 IS NULL) THEN 0 WHEN NOT (Calcs.num0 IS NULL) THEN 1 ELSE NULL END) AS temp_z_avg_num0___2730138885__0_, Calcs.num0 AS avg_num0_ok, Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 1 -15.699999809265137 key03
+10.0 1 10.0 key08
+12.300000190734863 1 12.300000190734863 key00
+0.0 1 0.0 key07
+0.0 1 0.0 key10
+0.0 1 0.0 key15
+0.0 1 0.0 key12
+0.0 1 0.0 key13
+-3.5 1 -3.5 key05
+0.0 1 0.0 key09
+0.0 1 0.0 key11
+-12.300000190734863 1 -12.300000190734863 key01
+0.0 1 0.0 key06
+0.0 1 0.0 key16
+3.5 1 3.5 key04
+0.0 1 0.0 key14
+15.699999809265137 1 15.699999809265137 key02
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(Calcs.int0) AS temp_z_avg_int0___3910975586__0_, SUM(Calcs.int0) AS temp_z_avg_int0___645427419__0_, AVG(CAST(Calcs.int0 AS DOUBLE)) AS avg_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 1 1 1.0
+key01 1 0 0.0
+key02 1 0 0.0
+key03 1 0 0.0
+key04 1 7 7.0
+key05 1 3 3.0
+key06 1 8 8.0
+key07 1 0 0.0
+key08 1 0 0.0
+key09 1 8 8.0
+key10 1 4 4.0
+key11 1 10 10.0
+key12 1 0 0.0
+key13 1 4 4.0
+key14 1 11 11.0
+key15 1 4 4.0
+key16 1 8 8.0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, TO_DATE(Calcs.`__time`) AS none_z_datetrunc_dayofyear_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (CASE WHEN Calcs.num0 < 0 THEN CAST(NULL AS DOUBLE) ELSE SQRT(Calcs.num0) END) AS sum_z_sqrt_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 NULL
+key08 10.0 3.1622776601683795
+key00 12.300000190734863 3.5071356105424356
+key07 0.0 0.0
+key10 0.0 0.0
+key15 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key05 -3.5 NULL
+key09 0.0 0.0
+key11 0.0 0.0
+key01 -12.300000190734863 NULL
+key06 0.0 0.0
+key16 0.0 0.0
+key04 3.5 1.8708286933869707
+key14 0.0 0.0
+key02 15.699999809265137 3.9623225271632214
+PREHOOK: query: SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS sum_num0_ok, SUM(Calcs.num0) AS usr_z_sum_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 10.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 > Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_max_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 -6.050000190734863 -6.050000190734863
+key08 10.0 4.769999980926514 10.0
+key00 12.300000190734863 0.0 12.300000190734863
+key07 0.0 -10.239999771118164 0.0
+key10 0.0 19.389999389648438 19.389999389648438
+key15 0.0 6.75 6.75
+key12 0.0 3.380000114440918 3.380000114440918
+key13 0.0 0.0 0.0
+key05 -3.5 10.710000038146973 10.710000038146973
+key09 0.0 0.0 0.0
+key11 0.0 3.819999933242798 3.819999933242798
+key01 -12.300000190734863 10.850000381469727 10.850000381469727
+key06 0.0 0.0 0.0
+key16 0.0 0.0 0.0
+key04 3.5 8.319999694824219 8.319999694824219
+key14 0.0 -14.210000038146973 0.0
+key02 15.699999809265137 -13.470000267028809 15.699999809265137
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5))
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5)) AS none_z_dateadd_year_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CONCAT(YEAR(Calcs.`__time`)+Calcs.int1, SUBSTR(FROM_UNIXTIME(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), 5))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2000-07-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2007-07-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2001-07-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2006-07-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2007-07-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific -8 key11 1996-07-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific -6 key01 1998-07-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific -9 key16 1995-07-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, COUNT(Calcs.num0) AS cnt_num0_ok, COUNT(Calcs.num0) AS usr_z_count_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 1 1
+key01 1 1
+key02 1 1
+key03 1 1
+key04 1 1
+key05 1 1
+key06 1 1
+key07 1 1
+key08 1 1
+key09 1 1
+key10 1 1
+key11 1 1
+key12 1 1
+key13 1 1
+key14 1 1
+key15 1 1
+key16 1 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, (-Calcs.num0) AS sum_z_neg_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 15.699999809265137
+key08 10.0 -10.0
+key00 12.300000190734863 -12.300000190734863
+key07 0.0 -0.0
+key10 0.0 -0.0
+key15 0.0 -0.0
+key12 0.0 -0.0
+key13 0.0 -0.0
+key05 -3.5 3.5
+key09 0.0 -0.0
+key11 0.0 -0.0
+key01 -12.300000190734863 12.300000190734863
+key06 0.0 -0.0
+key16 0.0 -0.0
+key04 3.5 -3.5
+key14 0.0 -0.0
+key02 15.699999809265137 -15.699999809265137
+PREHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(Calcs.num0) AS temp_z_var_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_var_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_var_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN VAR_SAMP(Calcs.num0) ELSE NULL END) AS var_num0_ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+10.0 17 920.059997406006 57.13610277905185
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num4 AS sum_num4_ok, (CASE WHEN (Calcs.num0 IS NULL) OR (Calcs.num4 IS NULL) THEN NULL WHEN Calcs.num0 < Calcs.num4 THEN Calcs.num0 ELSE Calcs.num4 END) AS sum_z_min_num_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 -6.050000190734863 -15.699999809265137
+key08 10.0 4.769999980926514 4.769999980926514
+key00 12.300000190734863 0.0 0.0
+key07 0.0 -10.239999771118164 -10.239999771118164
+key10 0.0 19.389999389648438 0.0
+key15 0.0 6.75 0.0
+key12 0.0 3.380000114440918 0.0
+key13 0.0 0.0 0.0
+key05 -3.5 10.710000038146973 -3.5
+key09 0.0 0.0 0.0
+key11 0.0 3.819999933242798 0.0
+key01 -12.300000190734863 10.850000381469727 -12.300000190734863
+key06 0.0 0.0 0.0
+key16 0.0 0.0 0.0
+key04 3.5 8.319999694824219 3.5
+key14 0.0 -14.210000038146973 -14.210000038146973
+key02 15.699999809265137 -13.470000267028809 -13.470000267028809
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(DAY(Calcs.`__time`) AS STRING) AS none_z_datename_day_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 4
+2004-07-04 17:00:00.0 US/Pacific key08 4
+2004-07-08 17:00:00.0 US/Pacific key00 8
+2004-07-12 17:00:00.0 US/Pacific key07 12
+2004-07-13 17:00:00.0 US/Pacific key10 13
+2004-07-13 17:00:00.0 US/Pacific key15 13
+2004-07-16 17:00:00.0 US/Pacific key12 16
+2004-07-19 17:00:00.0 US/Pacific key13 19
+2004-07-21 17:00:00.0 US/Pacific key05 21
+2004-07-23 17:00:00.0 US/Pacific key09 23
+2004-07-24 17:00:00.0 US/Pacific key11 24
+2004-07-25 17:00:00.0 US/Pacific key01 25
+2004-07-27 17:00:00.0 US/Pacific key06 27
+2004-07-27 17:00:00.0 US/Pacific key16 27
+2004-07-28 17:00:00.0 US/Pacific key04 28
+2004-07-30 17:00:00.0 US/Pacific key14 30
+2004-08-01 17:00:00.0 US/Pacific key02 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, SECOND(Calcs.time1) AS sum_z_timepart_second_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 22:50:16 0
+key08 22:20:14 0
+key00 19:36:22 0
+key07 19:48:23 0
+key10 00:05:57 0
+key15 NULL 0
+key12 04:48:07 0
+key13 NULL 0
+key05 19:57:33 0
+key09 NULL 0
+key11 04:40:49 0
+key01 02:05:25 0
+key06 NULL 0
+key16 12:33:57 0
+key04 NULL 0
+key14 18:58:41 0
+key02 09:33:31 0
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 IS NULL) AS none_z_isnull_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int1 AS sum_int1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -4
+key08 3
+key00 -3
+key07 2
+key10 0
+key15 0
+key12 0
+key13 0
+key05 0
+key09 3
+key11 -8
+key01 -6
+key06 0
+key16 -9
+key04 0
+key14 0
+key02 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.int0) AS sum_int0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 1
+key01 0
+key02 0
+key03 0
+key04 7
+key05 3
+key06 8
+key07 0
+key08 0
+key09 8
+key10 4
+key11 10
+key12 0
+key13 4
+key14 11
+key15 4
+key16 8
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, false AS none_z_false_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false
+key08 false
+key00 false
+key07 false
+key10 false
+key15 false
+key12 false
+key13 false
+key05 false
+key09 false
+key11 false
+key01 false
+key06 false
+key16 false
+key04 false
+key14 false
+key02 false
+PREHOOK: query: SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS none_str2_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str2 ORDER BY none_str2_nk ASC
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL
+eight
+eleven
+fifteen
+five
+fourteen
+nine
+one
+six
+sixteen
+ten
+three
+twelve
+two
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MONTH(Calcs.`__time`) AS none_z_month_ok, MONTH(Calcs.`__time`) AS sum_z_datepart_month_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 7 7
+2004-07-04 17:00:00.0 US/Pacific key08 7 7
+2004-07-08 17:00:00.0 US/Pacific key00 7 7
+2004-07-12 17:00:00.0 US/Pacific key07 7 7
+2004-07-13 17:00:00.0 US/Pacific key10 7 7
+2004-07-13 17:00:00.0 US/Pacific key15 7 7
+2004-07-16 17:00:00.0 US/Pacific key12 7 7
+2004-07-19 17:00:00.0 US/Pacific key13 7 7
+2004-07-21 17:00:00.0 US/Pacific key05 7 7
+2004-07-23 17:00:00.0 US/Pacific key09 7 7
+2004-07-24 17:00:00.0 US/Pacific key11 7 7
+2004-07-25 17:00:00.0 US/Pacific key01 7 7
+2004-07-27 17:00:00.0 US/Pacific key06 7 7
+2004-07-27 17:00:00.0 US/Pacific key16 7 7
+2004-07-28 17:00:00.0 US/Pacific key04 7 7
+2004-07-30 17:00:00.0 US/Pacific key14 7 7
+2004-08-01 17:00:00.0 US/Pacific key02 8 8
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num2) AS sum_num2_ok FROM druid_tableau.calcs Calcs JOIN ( SELECT Calcs.key AS none_key_nk, COUNT(1) AS xtableau_join_flag, SUM(Calcs.num2) AS x__alias__0 FROM druid_tableau.calcs Calcs GROUP BY Calcs.key ORDER BY x__alias__0 DESC LIMIT 10 ) t0 ON (Calcs.key = t0.none_key_nk) GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 17.860000610351562
+key01 16.729999542236328
+key03 8.510000228881836
+key05 8.979999542236328
+key06 11.6899995803833
+key07 17.25
+key09 11.5
+key13 13.039999961853027
+key15 10.979999542236328
+key16 7.869999885559082
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok, (Calcs.num0 + Calcs.num1) AS sum_z_num_plus_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 7.429999828338623 -8.269999980926514
+key08 10.0 9.470000267028809 19.47000026702881
+key00 12.300000190734863 8.420000076293945 20.72000026702881
+key07 0.0 11.380000114440918 11.380000114440918
+key10 0.0 10.319999694824219 10.319999694824219
+key15 0.0 16.809999465942383 16.809999465942383
+key12 0.0 12.050000190734863 12.050000190734863
+key13 0.0 10.369999885559082 10.369999885559082
+key05 -3.5 9.380000114440918 5.880000114440918
+key09 0.0 12.399999618530273 12.399999618530273
+key11 0.0 2.4700000286102295 2.4700000286102295
+key01 -12.300000190734863 6.710000038146973 -5.590000152587891
+key06 0.0 16.420000076293945 16.420000076293945
+key16 0.0 7.119999885559082 7.119999885559082
+key04 3.5 9.050000190734863 12.550000190734863
+key14 0.0 7.099999904632568 7.099999904632568
+key02 15.699999809265137 9.779999732971191 25.479999542236328
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(FROM_UNIXTIME(IF(UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(CONCAT(TO_DATE(Calcs.`__time`), ' 00:00:00'), 'yyyy-MM-dd')) + COALESCE(HOUR(Calcs.`__time`), 0)*3600, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd')) + COALESCE(MINUTE(Calcs.`__time`), 0)*60, 'yyyy-MM-dd HH:mm:ss') AS none_z_datetrunc_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CAST(Calcs.int0 AS STRING) AS none_str_int0__nk, CAST(Calcs.num4 AS STRING) AS none_str_num4__nk, CAST(CAST(CONCAT(CAST(Calcs.num4 AS STRING),CAST(Calcs.int0 AS STRING)) AS DOUBLE) AS BIGINT) AS sum_z_int_str_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0 -6.050000190734863 -6
+key08 0 4.769999980926514 4
+key00 1 0.0 0
+key07 0 -10.239999771118164 -10
+key10 4 19.389999389648438 19
+key15 4 6.75 6
+key12 0 3.380000114440918 3
+key13 4 0.0 0
+key05 3 10.710000038146973 10
+key09 8 0.0 0
+key11 10 3.819999933242798 3
+key01 0 10.850000381469727 10
+key06 8 0.0 0
+key16 8 0.0 0
+key04 7 8.319999694824219 8
+key14 11 -14.210000038146973 -14
+key02 0 -13.470000267028809 -13
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdev_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdev_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdev_num0___4071133194__0_, (CASE WHEN COUNT(Calcs.num0) > 1 THEN STDDEV_SAMP(Calcs.num0) ELSE NULL END) AS std_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 1 151.29000469207767 NULL
+key01 -12.300000190734863 1 151.29000469207767 NULL
+key02 15.699999809265137 1 246.48999401092533 NULL
+key03 -15.699999809265137 1 246.48999401092533 NULL
+key04 3.5 1 12.25 NULL
+key05 -3.5 1 12.25 NULL
+key06 0.0 1 0.0 NULL
+key07 0.0 1 0.0 NULL
+key08 10.0 1 100.0 NULL
+key09 0.0 1 0.0 NULL
+key10 0.0 1 0.0 NULL
+key11 0.0 1 0.0 NULL
+key12 0.0 1 0.0 NULL
+key13 0.0 1 0.0 NULL
+key14 0.0 1 0.0 NULL
+key15 0.0 1 0.0 NULL
+key16 0.0 1 0.0 NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, COS(Calcs.num0) AS sum_z_cos_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 -0.9999682918304649
+key08 10.0 -0.8390715290764524
+key00 12.300000190734863 0.9647326680940718
+key07 0.0 1.0
+key10 0.0 1.0
+key15 0.0 1.0
+key12 0.0 1.0
+key13 0.0 1.0
+key05 -3.5 -0.9364566872907963
+key09 0.0 1.0
+key11 0.0 1.0
+key01 -12.300000190734863 0.9647326680940718
+key06 0.0 1.0
+key16 0.0 1.0
+key04 3.5 -0.9364566872907963
+key14 0.0 1.0
+key02 15.699999809265137 -0.9999682918304649
+PREHOOK: query: SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.num0 AS max_num0_ok, Calcs.key AS none_key_nk, Calcs.num0 AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-15.699999809265137 key03 -15.699999809265137
+10.0 key08 10.0
+12.300000190734863 key00 12.300000190734863
+0.0 key07 0.0
+0.0 key10 0.0
+0.0 key15 0.0
+0.0 key12 0.0
+0.0 key13 0.0
+-3.5 key05 -3.5
+0.0 key09 0.0
+0.0 key11 0.0
+-12.300000190734863 key01 -12.300000190734863
+0.0 key06 0.0
+0.0 key16 0.0
+3.5 key04 3.5
+0.0 key14 0.0
+15.699999809265137 key02 15.699999809265137
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_stdevp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_stdevp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_stdevp_num0___4071133194__0_, STDDEV_POP(Calcs.num0) AS stp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 1 151.29000469207767 0.0
+key01 -12.300000190734863 1 151.29000469207767 0.0
+key02 15.699999809265137 1 246.48999401092533 0.0
+key03 -15.699999809265137 1 246.48999401092533 0.0
+key04 3.5 1 12.25 0.0
+key05 -3.5 1 12.25 0.0
+key06 0.0 1 0.0 0.0
+key07 0.0 1 0.0 0.0
+key08 10.0 1 100.0 0.0
+key09 0.0 1 0.0 0.0
+key10 0.0 1 0.0 0.0
+key11 0.0 1 0.0 0.0
+key12 0.0 1 0.0 0.0
+key13 0.0 1 0.0 0.0
+key14 0.0 1 0.0 0.0
+key15 0.0 1 0.0 0.0
+key16 0.0 1 0.0 0.0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, HOUR(Calcs.`__time`) AS sum_z_datepart_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 17
+2004-07-04 17:00:00.0 US/Pacific key08 17
+2004-07-08 17:00:00.0 US/Pacific key00 17
+2004-07-12 17:00:00.0 US/Pacific key07 17
+2004-07-13 17:00:00.0 US/Pacific key10 17
+2004-07-13 17:00:00.0 US/Pacific key15 17
+2004-07-16 17:00:00.0 US/Pacific key12 17
+2004-07-19 17:00:00.0 US/Pacific key13 17
+2004-07-21 17:00:00.0 US/Pacific key05 17
+2004-07-23 17:00:00.0 US/Pacific key09 17
+2004-07-24 17:00:00.0 US/Pacific key11 17
+2004-07-25 17:00:00.0 US/Pacific key01 17
+2004-07-27 17:00:00.0 US/Pacific key06 17
+2004-07-27 17:00:00.0 US/Pacific key16 17
+2004-07-28 17:00:00.0 US/Pacific key04 17
+2004-07-30 17:00:00.0 US/Pacific key14 17
+2004-08-01 17:00:00.0 US/Pacific key02 17
+PREHOOK: query: SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str2 AS temp_z_max_str2___3598104523__0_, Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, LENGTH(Calcs.str2) AS sum_len_str2__ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL key03 NULL 0
+nine key08 nine 4
+one key00 one 3
+eight key07 eight 5
+eleven key10 eleven 6
+sixteen key15 sixteen 7
+NULL key12 NULL 0
+fourteen key13 fourteen 8
+six key05 six 3
+ten key09 ten 3
+twelve key11 twelve 6
+two key01 two 3
+NULL key06 NULL 0
+NULL key16 NULL 0
+five key04 five 4
+fifteen key14 fifteen 7
+three key02 three 5
+PREHOOK: query: SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END) AS str2__bin_ FROM druid_tableau.calcs Calcs GROUP BY (CASE WHEN (Calcs.str2 IS NULL) THEN NULL WHEN (Calcs.str2 = 'one' OR Calcs.str2 = 'three' OR Calcs.str2 = 'two') THEN 'one' WHEN (Calcs.str2 = 'eight') THEN NULL ELSE Calcs.str2 END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL
+eleven
+fifteen
+five
+fourteen
+nine
+one
+six
+sixteen
+ten
+twelve
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_varp_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_varp_num0___2730138885__0_, SUM((Calcs.num0 * Calcs.num0)) AS temp_z_varp_num0___4071133194__0_, VAR_POP(Calcs.num0) AS vrp_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 1 151.29000469207767 0.0
+key01 -12.300000190734863 1 151.29000469207767 0.0
+key02 15.699999809265137 1 246.48999401092533 0.0
+key03 -15.699999809265137 1 246.48999401092533 0.0
+key04 3.5 1 12.25 0.0
+key05 -3.5 1 12.25 0.0
+key06 0.0 1 0.0 0.0
+key07 0.0 1 0.0 0.0
+key08 10.0 1 100.0 0.0
+key09 0.0 1 0.0 0.0
+key10 0.0 1 0.0 0.0
+key11 0.0 1 0.0 0.0
+key12 0.0 1 0.0 0.0
+key13 0.0 1 0.0 0.0
+key14 0.0 1 0.0 0.0
+key15 0.0 1 0.0 0.0
+key16 0.0 1 0.0 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 <> (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str2 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_ne_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL e NULL -7.25
+key08 nine NULL NULL -13.380000114440918
+key00 one e true -11.520000457763672
+key07 eight e false 3.640000104904175
+key10 eleven e true -4.789999961853027
+key15 sixteen e true -10.979999542236328
+key12 NULL NULL NULL -6.619999885559082
+key13 fourteen NULL NULL -18.43000030517578
+key05 six NULL NULL -19.959999084472656
+key09 ten e true -10.5600004196167
+key11 twelve NULL NULL -10.8100004196167
+key01 two e true -9.3100004196167
+key06 NULL e NULL 10.930000305175781
+key16 NULL NULL NULL -2.5999999046325684
+key04 five NULL false 12.930000305175781
+key14 fifteen e false 6.840000152587891
+key02 three e true -12.170000076293945
+PREHOOK: query: SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT DAY(Calcs.date1) AS dy_date1_ok FROM druid_tableau.calcs Calcs WHERE (DAY(Calcs.date1) IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(HOUR(Calcs.`__time`) AS STRING), '') AS none_z_datename_hour_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 17
+2004-07-04 17:00:00.0 US/Pacific key08 17
+2004-07-08 17:00:00.0 US/Pacific key00 17
+2004-07-12 17:00:00.0 US/Pacific key07 17
+2004-07-13 17:00:00.0 US/Pacific key10 17
+2004-07-13 17:00:00.0 US/Pacific key15 17
+2004-07-16 17:00:00.0 US/Pacific key12 17
+2004-07-19 17:00:00.0 US/Pacific key13 17
+2004-07-21 17:00:00.0 US/Pacific key05 17
+2004-07-23 17:00:00.0 US/Pacific key09 17
+2004-07-24 17:00:00.0 US/Pacific key11 17
+2004-07-25 17:00:00.0 US/Pacific key01 17
+2004-07-27 17:00:00.0 US/Pacific key06 17
+2004-07-27 17:00:00.0 US/Pacific key16 17
+2004-07-28 17:00:00.0 US/Pacific key04 17
+2004-07-30 17:00:00.0 US/Pacific key14 17
+2004-08-01 17:00:00.0 US/Pacific key02 17
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(SECOND(Calcs.`__time`) AS STRING), '') AS none_z_datename_second_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss') AS none_z_dateadd_hour_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, FROM_UNIXTIME(IF(UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss') > 0, UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd HH:mm:ss'), UNIX_TIMESTAMP(Calcs.`__time`, 'yyyy-MM-dd')) + Calcs.int1*3600, 'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2004-07-04 13:00:00
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2004-07-04 20:00:00
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2004-07-08 14:00:00
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2004-07-12 19:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2004-07-23 20:00:00
+2004-07-24 17:00:00.0 US/Pacific -8 key11 2004-07-24 09:00:00
+2004-07-25 17:00:00.0 US/Pacific -6 key01 2004-07-25 11:00:00
+2004-07-27 17:00:00.0 US/Pacific -9 key16 2004-07-27 08:00:00
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, SUM(Calcs.num0) AS temp_z_avg_num0___1723718801__0_, COUNT(Calcs.num0) AS temp_z_avg_num0___2730138885__0_, AVG(Calcs.num0) AS avg_num0_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 1 12.300000190734863
+key01 -12.300000190734863 1 -12.300000190734863
+key02 15.699999809265137 1 15.699999809265137
+key03 -15.699999809265137 1 -15.699999809265137
+key04 3.5 1 3.5
+key05 -3.5 1 -3.5
+key06 0.0 1 0.0
+key07 0.0 1 0.0
+key08 10.0 1 10.0
+key09 0.0 1 0.0
+key10 0.0 1 0.0
+key11 0.0 1 0.0
+key12 0.0 1 0.0
+key13 0.0 1 0.0
+key14 0.0 1 0.0
+key15 0.0 1 0.0
+key16 0.0 1 0.0
+PREHOOK: query: SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT (Calcs.num1 > 10.) AS none_calcfield01_nk, (Calcs.num2 > 10.) AS none_calcfield02_nk, Calcs.num2 AS none_num2_ok FROM druid_tableau.calcs Calcs GROUP BY (Calcs.num1 > 10.), (Calcs.num2 > 10.), Calcs.num2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+false false 0.0
+false false 3.7899999618530273
+false false 6.460000038146973
+false false 7.869999885559082
+false false 8.510000228881836
+false false 8.979999542236328
+false false 16.729999542236328
+false false 17.860000610351562
+false false 0.0
+false false 6.800000190734863
+false false 10.979999542236328
+false false 11.5
+false false 11.6899995803833
+false false 13.039999961853027
+false false 17.25
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 one one 3
+key01 two two 3
+key02 three three 5
+key03 NULL NULL 0
+key04 five five 4
+key05 six six 3
+key06 NULL NULL 0
+key07 eight eight 5
+key08 nine nine 4
+key09 ten ten 3
+key10 eleven eleven 6
+key11 twelve twelve 6
+key12 NULL NULL 0
+key13 fourteen fourteen 8
+key14 fifteen fifteen 7
+key15 sixteen sixteen 7
+key16 NULL NULL 0
+PREHOOK: query: SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END))
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END)) AS none_date_datetime0__ok, COUNT(Calcs.key) AS cnt_key_ok FROM druid_tableau.calcs Calcs GROUP BY TO_DATE((CASE WHEN (DAY(Calcs.`__time`) < 10) THEN NULL ELSE Calcs.`__time` END))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 4
+2004-07-12 1
+2004-07-13 2
+2004-07-16 1
+2004-07-19 1
+2004-07-21 1
+2004-07-23 1
+2004-07-24 1
+2004-07-25 1
+2004-07-27 2
+2004-07-28 1
+2004-07-30 1
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, CONCAT(CONCAT(' ',Calcs.str2),' ') AS none_padded_str2_nk, CONCAT(CONCAT('|',RTRIM(CONCAT(CONCAT(' ',Calcs.str2),' '))),'|') AS none_z_rtrim_str_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 NULL NULL
+key08 nine | nine|
+key00 one | one|
+key07 eight | eight|
+key10 eleven | eleven|
+key15 sixteen | sixteen|
+key12 NULL NULL
+key13 fourteen | fourteen|
+key05 six | six|
+key09 ten | ten|
+key11 twelve | twelve|
+key01 two | two|
+key06 NULL NULL
+key16 NULL NULL
+key04 five | five|
+key14 fifteen | fifteen|
+key02 three | three|
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST(((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) * 60 + COALESCE(MINUTE(Calcs.`__time`), 0) - COALESCE(MINUTE('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 1020
+2004-07-04 17:00:00.0 US/Pacific key08 1020
+2004-07-08 17:00:00.0 US/Pacific key00 6780
+2004-07-12 17:00:00.0 US/Pacific key07 12540
+2004-07-13 17:00:00.0 US/Pacific key10 13980
+2004-07-13 17:00:00.0 US/Pacific key15 13980
+2004-07-16 17:00:00.0 US/Pacific key12 18300
+2004-07-19 17:00:00.0 US/Pacific key13 22620
+2004-07-21 17:00:00.0 US/Pacific key05 25500
+2004-07-23 17:00:00.0 US/Pacific key09 28380
+2004-07-24 17:00:00.0 US/Pacific key11 29820
+2004-07-25 17:00:00.0 US/Pacific key01 31260
+2004-07-27 17:00:00.0 US/Pacific key06 34140
+2004-07-27 17:00:00.0 US/Pacific key16 34140
+2004-07-28 17:00:00.0 US/Pacific key04 35580
+2004-07-30 17:00:00.0 US/Pacific key14 38460
+2004-08-01 17:00:00.0 US/Pacific key02 41340
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, HOUR(Calcs.time1) AS sum_z_timepart_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 22:50:16 16
+key08 22:20:14 16
+key00 19:36:22 16
+key07 19:48:23 16
+key10 00:05:57 16
+key15 NULL 16
+key12 04:48:07 16
+key13 NULL 16
+key05 19:57:33 16
+key09 NULL 16
+key11 04:40:49 16
+key01 02:05:25 16
+key06 NULL 16
+key16 12:33:57 16
+key04 NULL 16
+key14 18:58:41 16
+key02 09:33:31 16
+PREHOOK: query: SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk FROM druid_tableau.calcs Calcs WHERE (Calcs.key IS NULL) LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS sum_num0_ok, POW(Calcs.num0,2) AS sum_z_square_num_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 -15.699999809265137 246.48999401092533
+key08 10.0 100.0
+key00 12.300000190734863 151.29000469207767
+key07 0.0 0.0
+key10 0.0 0.0
+key15 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key05 -3.5 12.25
+key09 0.0 0.0
+key11 0.0 0.0
+key01 -12.300000190734863 151.29000469207767
+key06 0.0 0.0
+key16 0.0 0.0
+key04 3.5 12.25
+key14 0.0 0.0
+key02 15.699999809265137 246.48999401092533
+PREHOOK: query: SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT MAX(Calcs.num0) AS max_num0_ok, MAX(Calcs.num0) AS usr_z_max_num0__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+15.699999809265137 15.699999809265137
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.date1 AS none_date1_ok, Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END) AS none_z_if_cmp_date_date_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.date0, Calcs.date1, Calcs.key, Calcs.num0, Calcs.num1, (CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.date0 WHEN NOT (Calcs.num0 > Calcs.num1) THEN Calcs.date1 ELSE NULL END)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL 2004-04-06 key05 -3.5 9.380000114440918 2004-04-06
+NULL 2004-04-07 key06 0.0 16.420000076293945 2004-04-07
+NULL 2004-04-08 key07 0.0 11.380000114440918 2004-04-08
+NULL 2004-04-09 key08 10.0 9.470000267028809 NULL
+NULL 2004-04-10 key09 0.0 12.399999618530273 2004-04-10
+NULL 2004-04-11 key10 0.0 10.319999694824219 2004-04-11
+NULL 2004-04-12 key11 0.0 2.4700000286102295 2004-04-12
+NULL 2004-04-13 key12 0.0 12.050000190734863 2004-04-13
+NULL 2004-04-14 key13 0.0 10.369999885559082 2004-04-14
+NULL 2004-04-15 key14 0.0 7.099999904632568 2004-04-15
+NULL 2004-04-16 key15 0.0 16.809999465942383 2004-04-16
+NULL 2004-04-17 key16 0.0 7.119999885559082 2004-04-17
+1972-07-04 2004-04-02 key01 -12.300000190734863 6.710000038146973 2004-04-02
+1975-11-12 2004-04-03 key02 15.699999809265137 9.779999732971191 1975-11-12
+2004-04-15 2004-04-01 key00 12.300000190734863 8.420000076293945 2004-04-15
+2004-06-04 2004-04-04 key03 -15.699999809265137 7.429999828338623 2004-04-04
+2004-06-19 2004-04-05 key04 3.5 9.050000190734863 2004-04-05
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CONCAT(YEAR(Calcs.`__time`), (CASE WHEN MONTH(Calcs.`__time`)<4 THEN '-01' WHEN MONTH(Calcs.`__time`)<7 THEN '-04' WHEN MONTH(Calcs.`__time`)<10 THEN '-07' ELSE '-10' END), '-01 00:00:00') AS none_z_datetrunc_quarter_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004-07-01 00:00:00
+2004-07-04 17:00:00.0 US/Pacific key08 2004-07-01 00:00:00
+2004-07-08 17:00:00.0 US/Pacific key00 2004-07-01 00:00:00
+2004-07-12 17:00:00.0 US/Pacific key07 2004-07-01 00:00:00
+2004-07-13 17:00:00.0 US/Pacific key10 2004-07-01 00:00:00
+2004-07-13 17:00:00.0 US/Pacific key15 2004-07-01 00:00:00
+2004-07-16 17:00:00.0 US/Pacific key12 2004-07-01 00:00:00
+2004-07-19 17:00:00.0 US/Pacific key13 2004-07-01 00:00:00
+2004-07-21 17:00:00.0 US/Pacific key05 2004-07-01 00:00:00
+2004-07-23 17:00:00.0 US/Pacific key09 2004-07-01 00:00:00
+2004-07-24 17:00:00.0 US/Pacific key11 2004-07-01 00:00:00
+2004-07-25 17:00:00.0 US/Pacific key01 2004-07-01 00:00:00
+2004-07-27 17:00:00.0 US/Pacific key06 2004-07-01 00:00:00
+2004-07-27 17:00:00.0 US/Pacific key16 2004-07-01 00:00:00
+2004-07-28 17:00:00.0 US/Pacific key04 2004-07-01 00:00:00
+2004-07-30 17:00:00.0 US/Pacific key14 2004-07-01 00:00:00
+2004-08-01 17:00:00.0 US/Pacific key02 2004-07-01 00:00:00
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, MINUTE(Calcs.`__time`) AS sum_z_datepart_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.date0 AS none_date0_ok, Calcs.key AS none_key_nk, (Calcs.date0 < '1975-11-12') AS none_z_date_lt_date_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-06-04 key03 false
+NULL key08 false
+2004-04-15 key00 false
+NULL key07 false
+NULL key10 false
+NULL key15 false
+NULL key12 false
+NULL key13 false
+NULL key05 false
+NULL key09 false
+NULL key11 false
+1972-07-04 key01 false
+NULL key06 false
+NULL key16 false
+2004-06-19 key04 false
+NULL key14 false
+1975-11-12 key02 false
+PREHOOK: query: SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.str0 AS none_str0_nk, 'CONST' AS none_z_const_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.str0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+FURNITURE CONST
+OFFICE SUPPLIES CONST
+TECHNOLOGY CONST
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.int0 AS sum_int0_ok, Calcs.int1 AS sum_int1_ok, CASE WHEN Calcs.int1 = 0 THEN NULL ELSE ( Calcs.int0 / Calcs.int1 ) END AS sum_z_div_int_int_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 0 -4 -0.0
+key08 0 3 0.0
+key00 1 -3 -0.3333333333333333
+key07 0 2 0.0
+key10 4 0 NULL
+key15 4 0 NULL
+key12 0 0 NULL
+key13 4 0 NULL
+key05 3 0 NULL
+key09 8 3 2.6666666666666665
+key11 10 -8 -1.25
+key01 0 -6 -0.0
+key06 8 0 NULL
+key16 8 -9 -0.8888888888888888
+key04 7 0 NULL
+key14 11 0 NULL
+key02 0 0 NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN Calcs.num0 ELSE Calcs.num1 END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 12.300000190734863
+key01 -12.300000190734863 6.710000038146973 6.710000038146973
+key02 15.699999809265137 9.779999732971191 15.699999809265137
+key03 -15.699999809265137 7.429999828338623 7.429999828338623
+key04 3.5 9.050000190734863 9.050000190734863
+key05 -3.5 9.380000114440918 9.380000114440918
+key06 0.0 16.420000076293945 16.420000076293945
+key07 0.0 11.380000114440918 11.380000114440918
+key08 10.0 9.470000267028809 10.0
+key09 0.0 12.399999618530273 12.399999618530273
+key10 0.0 10.319999694824219 10.319999694824219
+key11 0.0 2.4700000286102295 2.4700000286102295
+key12 0.0 12.050000190734863 12.050000190734863
+key13 0.0 10.369999885559082 10.369999885559082
+key14 0.0 7.099999904632568 7.099999904632568
+key15 0.0 16.809999465942383 16.809999465942383
+key16 0.0 7.119999885559082 7.119999885559082
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.time1 AS none_time1_ok, MINUTE(Calcs.time1) AS sum_z_timepart_minute_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 22:50:16 0
+key08 22:20:14 0
+key00 19:36:22 0
+key07 19:48:23 0
+key10 00:05:57 0
+key15 NULL 0
+key12 04:48:07 0
+key13 NULL 0
+key05 19:57:33 0
+key09 NULL 0
+key11 04:40:49 0
+key01 02:05:25 0
+key06 NULL 0
+key16 12:33:57 0
+key04 NULL 0
+key14 18:58:41 0
+key02 09:33:31 0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, YEAR(Calcs.`__time`) AS none_z_year_ok, YEAR(Calcs.`__time`) AS sum_z_datepart_year_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 2004 2004
+2004-07-04 17:00:00.0 US/Pacific key08 2004 2004
+2004-07-08 17:00:00.0 US/Pacific key00 2004 2004
+2004-07-12 17:00:00.0 US/Pacific key07 2004 2004
+2004-07-13 17:00:00.0 US/Pacific key10 2004 2004
+2004-07-13 17:00:00.0 US/Pacific key15 2004 2004
+2004-07-16 17:00:00.0 US/Pacific key12 2004 2004
+2004-07-19 17:00:00.0 US/Pacific key13 2004 2004
+2004-07-21 17:00:00.0 US/Pacific key05 2004 2004
+2004-07-23 17:00:00.0 US/Pacific key09 2004 2004
+2004-07-24 17:00:00.0 US/Pacific key11 2004 2004
+2004-07-25 17:00:00.0 US/Pacific key01 2004 2004
+2004-07-27 17:00:00.0 US/Pacific key06 2004 2004
+2004-07-27 17:00:00.0 US/Pacific key16 2004 2004
+2004-07-28 17:00:00.0 US/Pacific key04 2004 2004
+2004-07-30 17:00:00.0 US/Pacific key14 2004 2004
+2004-08-01 17:00:00.0 US/Pacific key02 2004 2004
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str0 AS none_str0_nk, Calcs.str2 AS none_str2_nk, Calcs.str3 AS none_str3_nk, (Calcs.str2 > (CASE WHEN (Calcs.num3 > 0) THEN Calcs.str0 WHEN NOT (Calcs.num3 > 0) THEN Calcs.str3 ELSE NULL END)) AS none_z_str_gt_str_nk, Calcs.num3 AS sum_num3_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 OFFICE SUPPLIES NULL e NULL -7.25
+key08 TECHNOLOGY nine NULL NULL -13.380000114440918
+key00 FURNITURE one e true -11.520000457763672
+key07 OFFICE SUPPLIES eight e true 3.640000104904175
+key10 TECHNOLOGY eleven e true -4.789999961853027
+key15 TECHNOLOGY sixteen e true -10.979999542236328
+key12 TECHNOLOGY NULL NULL NULL -6.619999885559082
+key13 TECHNOLOGY fourteen NULL NULL -18.43000030517578
+key05 OFFICE SUPPLIES six NULL NULL -19.959999084472656
+key09 TECHNOLOGY ten e true -10.5600004196167
+key11 TECHNOLOGY twelve NULL NULL -10.8100004196167
+key01 FURNITURE two e true -9.3100004196167
+key06 OFFICE SUPPLIES NULL e NULL 10.930000305175781
+key16 TECHNOLOGY NULL NULL NULL -2.5999999046325684
+key04 OFFICE SUPPLIES five NULL true 12.930000305175781
+key14 TECHNOLOGY fifteen e true 6.840000152587891
+key02 OFFICE SUPPLIES three e true -12.170000076293945
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, COALESCE(CAST(MINUTE(Calcs.`__time`) AS STRING), '') AS none_z_datename_minute_nk FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 0
+2004-07-04 17:00:00.0 US/Pacific key08 0
+2004-07-08 17:00:00.0 US/Pacific key00 0
+2004-07-12 17:00:00.0 US/Pacific key07 0
+2004-07-13 17:00:00.0 US/Pacific key10 0
+2004-07-13 17:00:00.0 US/Pacific key15 0
+2004-07-16 17:00:00.0 US/Pacific key12 0
+2004-07-19 17:00:00.0 US/Pacific key13 0
+2004-07-21 17:00:00.0 US/Pacific key05 0
+2004-07-23 17:00:00.0 US/Pacific key09 0
+2004-07-24 17:00:00.0 US/Pacific key11 0
+2004-07-25 17:00:00.0 US/Pacific key01 0
+2004-07-27 17:00:00.0 US/Pacific key06 0
+2004-07-27 17:00:00.0 US/Pacific key16 0
+2004-07-28 17:00:00.0 US/Pacific key04 0
+2004-07-30 17:00:00.0 US/Pacific key14 0
+2004-08-01 17:00:00.0 US/Pacific key02 0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, (Calcs.num0 >= Calcs.num1) AS none_z_num_ge_num_nk, Calcs.num0 AS sum_num0_ok, Calcs.num1 AS sum_num1_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key03 false -15.699999809265137 7.429999828338623
+key08 false 10.0 9.470000267028809
+key00 false 12.300000190734863 8.420000076293945
+key07 false 0.0 11.380000114440918
+key10 false 0.0 10.319999694824219
+key15 false 0.0 16.809999465942383
+key12 false 0.0 12.050000190734863
+key13 false 0.0 10.369999885559082
+key05 false -3.5 9.380000114440918
+key09 false 0.0 12.399999618530273
+key11 false 0.0 2.4700000286102295
+key01 false -12.300000190734863 6.710000038146973
+key06 false 0.0 16.420000076293945
+key16 false 0.0 7.119999885559082
+key04 false 3.5 9.050000190734863
+key14 false 0.0 7.099999904632568
+key02 false 15.699999809265137 9.779999732971191
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.str2 AS none_str2_nk, Calcs.str2 AS none_z_str_str_nk FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.str2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 one one
+key01 two two
+key02 three three
+key03 NULL NULL
+key04 five five
+key05 six six
+key06 NULL NULL
+key07 eight eight
+key08 nine nine
+key09 ten ten
+key10 eleven eleven
+key11 twelve twelve
+key12 NULL NULL
+key13 fourteen fourteen
+key14 fifteen fifteen
+key15 sixteen sixteen
+key16 NULL NULL
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, MIN(Calcs.num0) AS min_num0_ok, MIN(Calcs.num0) AS usr_z_min_num0__ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 12.300000190734863
+key01 -12.300000190734863 -12.300000190734863
+key02 15.699999809265137 15.699999809265137
+key03 -15.699999809265137 -15.699999809265137
+key04 3.5 3.5
+key05 -3.5 -3.5
+key06 0.0 0.0
+key07 0.0 0.0
+key08 10.0 10.0
+key09 0.0 0.0
+key10 0.0 0.0
+key11 0.0 0.0
+key12 0.0 0.0
+key13 0.0 0.0
+key14 0.0 0.0
+key15 0.0 0.0
+key16 0.0 0.0
+PREHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.key AS none_key_nk, Calcs.num0 AS none_num0__dim__ok, Calcs.num1 AS none_num1__dim__ok, SUM((CASE WHEN (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num0 AS DOUBLE) WHEN NOT (Calcs.num0 > Calcs.num1) THEN CAST(Calcs.num1 AS DOUBLE) ELSE NULL END)) AS sum_z_if_cmp_num_num_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.key, Calcs.num0, Calcs.num1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+key00 12.300000190734863 8.420000076293945 12.300000190734863
+key01 -12.300000190734863 6.710000038146973 6.710000038146973
+key02 15.699999809265137 9.779999732971191 15.699999809265137
+key03 -15.699999809265137 7.429999828338623 7.429999828338623
+key04 3.5 9.050000190734863 9.050000190734863
+key05 -3.5 9.380000114440918 9.380000114440918
+key06 0.0 16.420000076293945 16.420000076293945
+key07 0.0 11.380000114440918 11.380000114440918
+key08 10.0 9.470000267028809 10.0
+key09 0.0 12.399999618530273 12.399999618530273
+key10 0.0 10.319999694824219 10.319999694824219
+key11 0.0 2.4700000286102295 2.4700000286102295
+key12 0.0 12.050000190734863 12.050000190734863
+key13 0.0 10.369999885559082 10.369999885559082
+key14 0.0 7.099999904632568 7.099999904632568
+key15 0.0 16.809999465942383 16.809999465942383
+key16 0.0 7.119999885559082 7.119999885559082
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.int1 AS none_int1_ok, Calcs.key AS none_key_nk, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END AS none_z_dateadd_quarter_ok FROM druid_tableau.calcs Calcs GROUP BY Calcs.`__time`, Calcs.int1, Calcs.key, CASE WHEN (Calcs.`__time` IS NOT NULL AND Calcs.int1 IS NOT NULL) THEN FROM_UNIXTIME(UNIX_TIMESTAMP(CONCAT((YEAR(Calcs.`__time`)+FLOOR((MONTH(Calcs.`__time`)+(Calcs.int1 * 3))/12)), CONCAT('-', CONCAT(LPAD(PMOD(MONTH(Calcs.`__time`)+(Calcs.int1 * 3), 12), 2, '0'), SUBSTR(Calcs.`__time`, 8)))), SUBSTR('yyyy-MM-dd HH:mm:ss',0,LENGTH(CAST(Calcs.`__time` AS STRING)))), 'yyyy-MM-dd HH:mm:ss') END
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific -4 key03 2003-07-04 17:00:00
+2004-07-04 17:00:00.0 US/Pacific 3 key08 2005-04-04 17:00:00
+2004-07-08 17:00:00.0 US/Pacific -3 key00 2003-10-08 17:00:00
+2004-07-12 17:00:00.0 US/Pacific 2 key07 2005-01-12 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key10 2004-07-13 17:00:00
+2004-07-13 17:00:00.0 US/Pacific 0 key15 2004-07-13 17:00:00
+2004-07-16 17:00:00.0 US/Pacific 0 key12 2004-07-16 17:00:00
+2004-07-19 17:00:00.0 US/Pacific 0 key13 2004-07-19 17:00:00
+2004-07-21 17:00:00.0 US/Pacific 0 key05 2004-07-21 17:00:00
+2004-07-23 17:00:00.0 US/Pacific 3 key09 2005-04-23 17:00:00
+2004-07-24 17:00:00.0 US/Pacific -8 key11 2002-07-24 17:00:00
+2004-07-25 17:00:00.0 US/Pacific -6 key01 2003-01-25 17:00:00
+2004-07-27 17:00:00.0 US/Pacific -9 key16 2002-04-27 17:00:00
+2004-07-27 17:00:00.0 US/Pacific 0 key06 2004-07-27 17:00:00
+2004-07-28 17:00:00.0 US/Pacific 0 key04 2004-07-28 17:00:00
+2004-07-30 17:00:00.0 US/Pacific 0 key14 2004-07-30 17:00:00
+2004-08-01 17:00:00.0 US/Pacific 0 key02 2004-08-01 17:00:00
+PREHOOK: query: SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT MIN(Calcs.str2) AS temp_z_min_str2___3992540197__0_, MIN(LENGTH(Calcs.str2)) AS min_len_str2__ok FROM druid_tableau.calcs Calcs GROUP BY 1 HAVING (COUNT(1) > 0)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+eight 0
+PREHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_tableau@calcs
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT Calcs.`__time` AS none_datetime0_ok, Calcs.key AS none_key_nk, CAST((DATEDIFF(Calcs.`__time`, '2004-07-04') * 24 + COALESCE(HOUR(Calcs.`__time`), 0) - COALESCE(HOUR('2004-07-04'), 0)) AS BIGINT) AS sum_z_datediff_hour_ok FROM druid_tableau.calcs Calcs
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_tableau@calcs
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2004-07-04 17:00:00.0 US/Pacific key03 17
+2004-07-04 17:00:00.0 US/Pacific key08 17
+2004-07-08 17:00:00.0 US/Pacific key00 113
+2004-07-12 17:00:00.0 US/Pacific key07 209
+2004-07-13 17:00:00.0 US/Pacific key10 233
+2004-07-13 17:00:00.0 US/Pacific key15 233
+2004-07-16 17:00:00.0 US/Pacific key12 305
+2004-07-19 17:00:00.0 US/Pacific key13 377
+2004-07-21 17:00:00.0 US/Pacific key05 425
+2004-07-23 17:00:00.0 US/Pacific key09 473
+2004-07-24 17:00:00.0 US/Pacific key11 497
+2004-07-25 17:00:00.0 US/Pacific key01 521
+2004-07-27 17:00:00.0 US/Pacific key06 569
+2004-07-27 17:00:00.0 US/Pacific key16 569
+2004-07-28 17:00:00.0 US/Pacific key04 593
+2004-07-30 17:00:00.0 US/Pacific key14 641
+2004-08-01 17:00:00.0 US/Pacific key02 689
diff --git ql/src/test/results/clientpositive/druid/druidmini_test1.q.out ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 2cbd5fb..aa68f48 100644
--- ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -66,21 +66,19 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"timeseries","dataSource":"default.druid_table","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
druid.query.type timeseries
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: $f0 (type: bigint)
outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT count(*) FROM druid_table
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT count(*) FROM druid_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
6105
PREHOOK: query: EXPLAIN SELECT floor_year(`__time`), SUM(cfloat), SUM(cdouble), SUM(ctinyint), SUM(csmallint),SUM(cint), SUM(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
@@ -101,23 +99,21 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"timeseries","dataSource":"default.druid_table","descending":false,"granularity":"year","aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"cfloat"},{"type":"doubleSum","name":"$f2","fieldName":"cdouble"},{"type":"longSum","name":"$f3","fieldName":"ctinyint"},{"type":"longSum","name":"$f4","fieldName":"csmallint"},{"type":"longSum","name":"$f5","fieldName":"cint"},{"type":"longSum","name":"$f6","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
druid.query.type timeseries
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float), $f3 (type: bigint), $f4 (type: bigint), $f5 (type: bigint), $f6 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT floor_year(`__time`), SUM(cfloat), SUM(cdouble), SUM(ctinyint), SUM(csmallint),SUM(cint), SUM(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT floor_year(`__time`), SUM(cfloat), SUM(cdouble), SUM(ctinyint), SUM(csmallint),SUM(cint), SUM(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1968-12-31 16:00:00.0 US/Pacific -4532.57 3660538.8 -4611 3658030 688783835691 8060200254
1969-12-31 16:00:00.0 US/Pacific -35057.676 2.3648124E7 -35356 4123059 719285966109 2932345033
PREHOOK: query: EXPLAIN SELECT floor_year(`__time`), MIN(cfloat), MIN(cdouble), MIN(ctinyint), MIN(csmallint),MIN(cint), MIN(cbigint)
@@ -139,23 +135,21 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"timeseries","dataSource":"default.druid_table","descending":false,"granularity":"year","aggregations":[{"type":"doubleMin","name":"$f1","fieldName":"cfloat"},{"type":"doubleMin","name":"$f2","fieldName":"cdouble"},{"type":"longMin","name":"$f3","fieldName":"ctinyint"},{"type":"longMin","name":"$f4","fieldName":"csmallint"},{"type":"longMin","name":"$f5","fieldName":"cint"},{"type":"longMin","name":"$f6","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
druid.query.type timeseries
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float), $f3 (type: bigint), $f4 (type: bigint), $f5 (type: bigint), $f6 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT floor_year(`__time`), MIN(cfloat), MIN(cdouble), MIN(ctinyint), MIN(csmallint),MIN(cint), MIN(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT floor_year(`__time`), MIN(cfloat), MIN(cdouble), MIN(ctinyint), MIN(csmallint),MIN(cint), MIN(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1968-12-31 16:00:00.0 US/Pacific -1790.778 -308691.84 -1790 -313425 0 -8577981133
1969-12-31 16:00:00.0 US/Pacific -964.719 -287404.84 -1051 -292138 -1073279343 -2147311592
PREHOOK: query: EXPLAIN SELECT floor_year(`__time`), MAX(cfloat), MAX(cdouble), MAX(ctinyint), MAX(csmallint),MAX(cint), MAX(cbigint)
@@ -177,23 +171,21 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"timeseries","dataSource":"default.druid_table","descending":false,"granularity":"year","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"cfloat"},{"type":"doubleMax","name":"$f2","fieldName":"cdouble"},{"type":"longMax","name":"$f3","fieldName":"ctinyint"},{"type":"longMax","name":"$f4","fieldName":"csmallint"},{"type":"longMax","name":"$f5","fieldName":"cint"},{"type":"longMax","name":"$f6","fieldName":"cbigint"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
druid.query.type timeseries
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float), $f3 (type: bigint), $f4 (type: bigint), $f5 (type: bigint), $f6 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT floor_year(`__time`), MAX(cfloat), MAX(cdouble), MAX(ctinyint), MAX(csmallint),MAX(cint), MAX(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT floor_year(`__time`), MAX(cfloat), MAX(cdouble), MAX(ctinyint), MAX(csmallint),MAX(cint), MAX(cbigint)
FROM druid_table GROUP BY floor_year(`__time`)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1968-12-31 16:00:00.0 US/Pacific 62.0 57235.0 62 57235 314088763179 2144274348
1969-12-31 16:00:00.0 US/Pacific 769.164 1.9565518E7 723 57435 319104152611 4923772860
PREHOOK: query: EXPLAIN SELECT cstring1, SUM(cdouble) as s FROM druid_table GROUP BY cstring1 ORDER BY s ASC LIMIT 10
@@ -213,21 +205,19 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"groupBy","dataSource":"default.druid_table","granularity":"all","dimensions":[{"type":"default","dimension":"cstring1"}],"limitSpec":{"type":"default","limit":10,"columns":[{"dimension":"$f1","direction":"ascending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"cdouble"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
druid.query.type groupBy
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: cstring1 (type: string), $f1 (type: float)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT cstring1, SUM(cdouble) as s FROM druid_table GROUP BY cstring1 ORDER BY s ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT cstring1, SUM(cdouble) as s FROM druid_table GROUP BY cstring1 ORDER BY s ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1cGVWH7n1QU -596096.7
821UdmGbkEf4j -14161.827
00iT08 0.0
@@ -255,21 +245,19 @@ STAGE PLANS:
properties:
druid.query.json {"queryType":"groupBy","dataSource":"default.druid_table","granularity":"all","dimensions":[{"type":"default","dimension":"cstring2"}],"limitSpec":{"type":"default","limit":10,"columns":[{"dimension":"cstring2","direction":"ascending","dimensionOrder":"alphanumeric"}]},"aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"cdouble"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
druid.query.type groupBy
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: cstring2 (type: string), $f1 (type: float)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
ListSink
PREHOOK: query: SELECT cstring2, MAX(cdouble) FROM druid_table GROUP BY cstring2 ORDER BY cstring2 ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT cstring2, MAX(cdouble) FROM druid_table GROUP BY cstring2 ORDER BY cstring2 ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
NULL 1.9565518E7
0AAE3daA78MISbsRsHJrp2PI 0.0
0amu3m60U20Xa3 -200.0
@@ -294,38 +282,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -337,12 +332,12 @@ PREHOOK: query: SELECT `__time`
FROM druid_table ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
@@ -369,38 +364,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1900-01-01T00:00:00.000Z/1970-03-01T08:00:00.000Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1900-01-01T00:00:00.000Z/1970-03-01T08:00:00.000Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -413,13 +415,13 @@ FROM druid_table
WHERE `__time` < '1970-03-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table
WHERE `__time` < '1970-03-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
@@ -446,38 +448,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-03-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-03-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -490,13 +499,13 @@ FROM druid_table
WHERE `__time` >= '1968-01-01 00:00:00' AND `__time` <= '1970-03-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table
WHERE `__time` >= '1968-01-01 00:00:00' AND `__time` <= '1970-03-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
@@ -525,38 +534,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-03-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-03-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -570,14 +586,14 @@ WHERE `__time` >= '1968-01-01 00:00:00' AND `__time` <= '1970-03-01 00:00:00'
AND `__time` < '2011-01-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table
WHERE `__time` >= '1968-01-01 00:00:00' AND `__time` <= '1970-03-01 00:00:00'
AND `__time` < '2011-01-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
@@ -604,38 +620,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-01-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-01-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -648,13 +671,13 @@ FROM druid_table
WHERE `__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table
WHERE `__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00' ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
@@ -683,38 +706,45 @@ STAGE DEPENDENCIES:
STAGE PLANS:
Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: druid_table
- properties:
- druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-04-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
- druid.query.type select
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Select Operator
- expressions: __time (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: timestamp with local time zone)
- sort order: +
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- TopN Hash Memory Usage: 0.1
- Reduce Operator Tree:
- Select Operator
- expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
- outputColumnNames: _col0
- Statistics: Num rows: 9173 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- Limit
- Number of rows: 10
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 0 Basic stats: PARTIAL Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: druid_table
+ properties:
+ druid.query.json {"queryType":"select","dataSource":"default.druid_table","descending":false,"intervals":["1968-01-01T08:00:00.000Z/1970-04-01T08:00:00.001Z"],"dimensions":[],"metrics":[],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
+ druid.query.type select
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: __time (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: timestamp with local time zone)
+ sort order: +
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ TopN Hash Memory Usage: 0.1
+ Reducer 2
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: timestamp with local time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 9173 Data size: 348640 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 10
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 10 Data size: 380 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -728,14 +758,14 @@ WHERE (`__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00')
OR (`__time` BETWEEN '1968-02-01 00:00:00' AND '1970-04-01 00:00:00') ORDER BY `__time` ASC LIMIT 10
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT `__time`
FROM druid_table
WHERE (`__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00')
OR (`__time` BETWEEN '1968-02-01 00:00:00' AND '1970-04-01 00:00:00') ORDER BY `__time` ASC LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
1969-12-31 15:59:00.0 US/Pacific
diff --git ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
index 7e01b0d..8b79f6a 100644
--- ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
+++ ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
@@ -52,11 +52,11 @@ POSTHOOK: Lineage: druid_alltypesorc.ctinyint SIMPLE [(alltypesorc)alltypesorc.F
PREHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
6057
PREHOOK: query: INSERT INTO TABLE druid_alltypesorc
SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
@@ -93,11 +93,11 @@ POSTHOOK: Output: default@druid_alltypesorc
PREHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
12162
PREHOOK: query: INSERT OVERWRITE TABLE druid_alltypesorc
SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
@@ -134,11 +134,11 @@ POSTHOOK: Output: default@druid_alltypesorc
PREHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
PREHOOK: type: QUERY
PREHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: SELECT COUNT(*) FROM druid_alltypesorc
POSTHOOK: type: QUERY
POSTHOOK: Input: default@druid_alltypesorc
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
6105
PREHOOK: query: DROP TABLE druid_alltypesorc
PREHOOK: type: DROPTABLE