diff --git a/mvn.log b/mvn.log
new file mode 100644
index 0000000..7dac237
--- /dev/null
+++ b/mvn.log
@@ -0,0 +1,1890 @@
+Building Hive Integration - Test Serde 3.0.0-SNAPSHOT
+[INFO] ------------------------------------------------------------------------
+[INFO]
+[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-no-snapshots) @ hive-it-test-serde ---
+[INFO]
+[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-banned-dependencies) @ hive-it-test-serde ---
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (download-spark) @ hive-it-test-serde ---
+[INFO] Executing tasks
+
+main:
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-remote-resources-plugin:1.5:process (process-resource-bundles) @ hive-it-test-serde ---
+[INFO]
+[INFO] --- maven-resources-plugin:2.7:resources (default-resources) @ hive-it-test-serde ---
+[INFO] Using 'UTF-8' encoding to copy filtered resources.
+[INFO] skip non existing resourceDirectory /Users/jhyde/open1/hive/itests/test-serde/src/main/resources
+[INFO] Copying 3 resources
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (define-classpath) @ hive-it-test-serde ---
+[INFO] Executing tasks
+
+main:
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-compiler-plugin:3.6.1:compile (default-compile) @ hive-it-test-serde ---
+[INFO] Nothing to compile - all classes are up to date
+[INFO]
+[INFO] --- maven-resources-plugin:2.7:testResources (default-testResources) @ hive-it-test-serde ---
+[INFO] Using 'UTF-8' encoding to copy filtered resources.
+[INFO] skip non existing resourceDirectory /Users/jhyde/open1/hive/itests/test-serde/src/test/resources
+[INFO] Copying 3 resources
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (setup-test-dirs) @ hive-it-test-serde ---
+[INFO] Executing tasks
+
+main:
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/test-serde/target/tmp
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/test-serde/target/testconf
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/test-serde/target/warehouse
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/test-serde/target/tmp
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/test-serde/target/warehouse
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/test-serde/target/testconf
+ [copy] Copying 19 files to /Users/jhyde/open1/hive/itests/test-serde/target/testconf
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-compiler-plugin:3.6.1:testCompile (default-testCompile) @ hive-it-test-serde ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-surefire-plugin:2.20.1:test (default-test) @ hive-it-test-serde ---
+[INFO]
+[INFO] ------------------------------------------------------------------------
+[INFO] Building Hive Integration - QFile Tests 3.0.0-SNAPSHOT
+[INFO] ------------------------------------------------------------------------
+[INFO]
+[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-no-snapshots) @ hive-it-qfile ---
+[INFO]
+[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-banned-dependencies) @ hive-it-qfile ---
+[INFO]
+[INFO] --- properties-maven-plugin:1.0-alpha-2:read-project-properties (default) @ hive-it-qfile ---
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (download-spark) @ hive-it-qfile ---
+[INFO] Executing tasks
+
+main:
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-remote-resources-plugin:1.5:process (process-resource-bundles) @ hive-it-qfile ---
+[INFO]
+[INFO] --- maven-resources-plugin:2.7:resources (default-resources) @ hive-it-qfile ---
+[INFO] Using 'UTF-8' encoding to copy filtered resources.
+[INFO] skip non existing resourceDirectory /Users/jhyde/open1/hive/itests/qtest/src/main/resources
+[INFO] Copying 3 resources
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (define-classpath) @ hive-it-qfile ---
+[INFO] Executing tasks
+
+main:
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-compiler-plugin:3.6.1:compile (default-compile) @ hive-it-qfile ---
+[INFO] No sources to compile
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (generate-tests-sources) @ hive-it-qfile ---
+[INFO] Executing tasks
+
+main:
+ [echo] /Users/jhyde/open1/hive/itests/qtest/target/test-classes:/Users/jhyde/open1/hive/itests/qtest/target/classes:/Users/jhyde/.m2/repository/org/apache/hive/hive-common/3.0.0-SNAPSHOT/hive-common-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-classification/3.0.0-SNAPSHOT/hive-classification-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-shims/3.0.0-SNAPSHOT/hive-shims-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/shims/hive-shims-common/3.0.0-SNAPSHOT/hive-shims-common-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/shims/hive-shims-0.23/3.0.0-SNAPSHOT/hive-shims-0.23-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/shims/hive-shims-scheduler/3.0.0-SNAPSHOT/hive-shims-scheduler-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-storage-api/3.0.0-SNAPSHOT/hive-storage-api-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/Users/jhyde/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-lang3/3.2/commons-lang3-3.2.jar:/Users/jhyde/.m2/repository/org/apache/orc/orc-core/1.4.2/orc-core-1.4.2.jar:/Users/jhyde/.m2/repository/io/airlift/aircompressor/0.8/aircompressor-0.8.jar:/Users/jhyde/.m2/repository/jline/jline/2.12/jline-2.12.jar:/Users/jhyde/.m2/repository/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-rewrite/9.3.8.v20160314/jetty-rewrite-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-client/9.3.8.v20160314/jetty-client-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-server/9.3.8.v20160314/jetty-server-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-http/9.3.8.v20160314/jetty-http-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-io/9.3.8.v20160314/jetty-io-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-servlet/9.3.8.v20160314/jetty-servlet-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-webapp/9.3.8.v20160314/jetty-webapp-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-xml/9.3.8.v20160314/jetty-xml-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-1.2-api/2.10.0/log4j-1.2-api-2.10.0.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-web/2.10.0/log4j-web-2.10.0.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.10.0/log4j-slf4j-impl-2.10.0.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-compress/1.9/commons-compress-1.9.jar:/Users/jhyde/.m2/repository/org/apache/ant/ant/1.9.1/ant-1.9.1.jar:/Users/jhyde/.m2/repository/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar:/Users/jhyde/.m2/repository/com/tdunning/json/1.8/json-1.8.jar:/Users/jhyde/.m2/repository/io/dropwizard/metrics/metrics-core/3.1.0/metrics-core-3.1.0.jar:/Users/jhyde/.m2/repository/io/dropwizard/metrics/metrics-jvm/3.1.0/metrics-jvm-3.1.0.jar:/Users/jhyde/.m2/repository/io/dropwizard/metrics/metrics-json/3.1.0/metrics-json-3.1.0.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.6.5/jackson-databind-2.6.5.jar:/Users/jhyde/.m2/repository/com/github/joshelser/dropwizard-metrics-hadoop-metrics2-reporter/0.1.2/dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar:/Users/jhyde/.m2/repository/javolution/javolution/5.5.1/javolution-5.5.1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-contrib/3.0.0-SNAPSHOT/hive-contrib-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/commons-codec/commons-codec/1.4/commons-codec-1.4.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-standalone-metastore/3.0.0-SNAPSHOT/hive-standalone-metastore-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/com/google/guava/guava/19.0/guava-19.0.jar:/Users/jhyde/.m2/repository/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/Users/jhyde/.m2/repository/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar:/Users/jhyde/.m2/repository/com/zaxxer/HikariCP/2.6.1/HikariCP-2.6.1.jar:/Users/jhyde/.m2/repository/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar:/Users/jhyde/.m2/repository/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar:/Users/jhyde/.m2/repository/org/antlr/antlr-runtime/3.5.2/antlr-runtime-3.5.2.jar:/Users/jhyde/.m2/repository/org/apache/derby/derby/10.11.1.1/derby-10.11.1.1.jar:/Users/jhyde/.m2/repository/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar:/Users/jhyde/.m2/repository/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar:/Users/jhyde/.m2/repository/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar:/Users/jhyde/.m2/repository/org/datanucleus/datanucleus-api-jdo/4.2.4/datanucleus-api-jdo-4.2.4.jar:/Users/jhyde/.m2/repository/org/datanucleus/datanucleus-core/4.1.17/datanucleus-core-4.1.17.jar:/Users/jhyde/.m2/repository/org/datanucleus/datanucleus-rdbms/4.1.19/datanucleus-rdbms-4.1.19.jar:/Users/jhyde/.m2/repository/org/datanucleus/javax.jdo/3.2.0-m3/javax.jdo-3.2.0-m3.jar:/Users/jhyde/.m2/repository/javax/transaction/transaction-api/1.1/transaction-api-1.1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-standalone-metastore/3.0.0-SNAPSHOT/hive-standalone-metastore-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/open1/hive/itests/custom-serde/target/classes:/Users/jhyde/open1/hive/itests/hive-unit/target/test-classes:/Users/jhyde/.m2/repository/org/apache/hive/hive-jdbc/3.0.0-SNAPSHOT/hive-jdbc-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-service/3.0.0-SNAPSHOT/hive-service-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/javax/servlet/jsp/javax.servlet.jsp-api/2.3.1/javax.servlet.jsp-api-2.3.1.jar:/Users/jhyde/.m2/repository/net/sf/jpam/jpam/1.1/jpam-1.1.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-runner/9.3.8.v20160314/jetty-runner-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-plus/9.3.8.v20160314/jetty-plus-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-annotations/9.3.8.v20160314/jetty-annotations-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-jaas/9.3.8.v20160314/jetty-jaas-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/websocket/websocket-server/9.3.8.v20160314/websocket-server-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/websocket/websocket-common/9.3.8.v20160314/websocket-common-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/websocket/websocket-api/9.3.8.v20160314/websocket-api-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/websocket/websocket-client/9.3.8.v20160314/websocket-client-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/websocket/websocket-servlet/9.3.8.v20160314/websocket-servlet-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-jndi/9.3.8.v20160314/jetty-jndi-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/apache-jsp/9.3.8.v20160314/apache-jsp-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/toolchain/jetty-schemas/3.1/jetty-schemas-3.1.jar:/Users/jhyde/.m2/repository/org/eclipse/jdt/core/compiler/ecj/4.4.2/ecj-4.4.2.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/apache-jstl/9.3.8.v20160314/apache-jstl-9.3.8.v20160314.jar:/Users/jhyde/.m2/repository/org/apache/taglibs/taglibs-standard-spec/1.2.5/taglibs-standard-spec-1.2.5.jar:/Users/jhyde/.m2/repository/org/apache/taglibs/taglibs-standard-impl/1.2.5/taglibs-standard-impl-1.2.5.jar:/Users/jhyde/.m2/repository/tomcat/jasper-compiler/5.5.23/jasper-compiler-5.5.23.jar:/Users/jhyde/.m2/repository/javax/servlet/jsp-api/2.0/jsp-api-2.0.jar:/Users/jhyde/.m2/repository/ant/ant/1.6.5/ant-1.6.5.jar:/Users/jhyde/.m2/repository/tomcat/jasper-runtime/5.5.23/jasper-runtime-5.5.23.jar:/Users/jhyde/.m2/repository/commons-el/commons-el/1.0/commons-el-1.0.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-ext-client/3.0.0-SNAPSHOT/hive-llap-ext-client-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hcatalog/hive-hcatalog-core/3.0.0-SNAPSHOT/hive-hcatalog-core-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/Users/jhyde/.m2/repository/org/apache/hive/hcatalog/hive-hcatalog-streaming/3.0.0-SNAPSHOT/hive-hcatalog-streaming-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hcatalog/hive-hcatalog-server-extensions/3.0.0-SNAPSHOT/hive-hcatalog-server-extensions-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/javax/jms/jms/1.1/jms-1.1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hcatalog/hive-webhcat-java-client/3.0.0-SNAPSHOT/hive-webhcat-java-client-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-service/3.0.0-SNAPSHOT/hive-service-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-exec/3.0.0-SNAPSHOT/hive-exec-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/open1/hive/itests/util/target/classes:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-minicluster/1.7.3/accumulo-minicluster-1.7.3.jar:/Users/jhyde/.m2/repository/com/beust/jcommander/1.32/jcommander-1.32.jar:/Users/jhyde/.m2/repository/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/Users/jhyde/.m2/repository/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-core/1.7.3/accumulo-core-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-gc/1.7.3/accumulo-gc-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-master/1.7.3/accumulo-master-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-monitor/1.7.3/accumulo-monitor-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-shell/1.7.3/accumulo-shell-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-server-base/1.7.3/accumulo-server-base-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-start/1.7.3/accumulo-start-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-tracer/1.7.3/accumulo-tracer-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-tserver/1.7.3/accumulo-tserver-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-vfs2/2.1/commons-vfs2-2.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-minicluster/3.0.0-beta1/hadoop-minicluster-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-accumulo-handler/3.0.0-SNAPSHOT/hive-accumulo-handler-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-fate/1.7.3/accumulo-fate-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/accumulo/accumulo-trace/1.7.3/accumulo-trace-1.7.3.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-accumulo-handler/3.0.0-SNAPSHOT/hive-accumulo-handler-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-cli/3.0.0-SNAPSHOT/hive-cli-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-beeline/3.0.0-SNAPSHOT/hive-beeline-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/net/sf/supercsv/super-csv/2.2.0/super-csv-2.2.0.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-hbase-handler/3.0.0-SNAPSHOT/hive-hbase-handler-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-hbase-handler/3.0.0-SNAPSHOT/hive-hbase-handler-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-metastore/3.0.0-SNAPSHOT/hive-metastore-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar:/Users/jhyde/.m2/repository/javax/transaction/jta/1.1/jta-1.1.jar:/Users/jhyde/.m2/repository/co/cask/tephra/tephra-api/0.6.0/tephra-api-0.6.0.jar:/Users/jhyde/.m2/repository/co/cask/tephra/tephra-core/0.6.0/tephra-core-0.6.0.jar:/Users/jhyde/.m2/repository/com/google/inject/extensions/guice-assistedinject/3.0/guice-assistedinject-3.0.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-common/0.6.0-incubating/twill-common-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-core/0.6.0-incubating/twill-core-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-api/0.6.0-incubating/twill-api-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-discovery-api/0.6.0-incubating/twill-discovery-api-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-discovery-core/0.6.0-incubating/twill-discovery-core-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/twill/twill-zookeeper/0.6.0-incubating/twill-zookeeper-0.6.0-incubating.jar:/Users/jhyde/.m2/repository/co/cask/tephra/tephra-hbase-compat-1.0/0.6.0/tephra-hbase-compat-1.0-0.6.0.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-client/2.0.0-alpha4/hbase-client-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/jruby/jcodings/jcodings/1.0.18/jcodings-1.0.18.jar:/Users/jhyde/.m2/repository/org/jruby/joni/joni/2.1.11/joni-2.1.11.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-serde/3.0.0-SNAPSHOT/hive-serde-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-service-rpc/3.0.0-SNAPSHOT/hive-service-rpc-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar:/Users/jhyde/.m2/repository/org/apache/avro/avro/1.7.7/avro-1.7.7.jar:/Users/jhyde/.m2/repository/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/Users/jhyde/.m2/repository/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar:/Users/jhyde/.m2/repository/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar:/Users/jhyde/.m2/repository/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar:/Users/jhyde/.m2/repository/org/apache/parquet/parquet-hadoop-bundle/1.9.0/parquet-hadoop-bundle-1.9.0.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-exec/3.0.0-SNAPSHOT/hive-exec-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-vector-code-gen/3.0.0-SNAPSHOT/hive-vector-code-gen-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/velocity/velocity/1.5/velocity-1.5.jar:/Users/jhyde/.m2/repository/oro/oro/2.0.8/oro-2.0.8.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-tez/3.0.0-SNAPSHOT/hive-llap-tez-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/commons-httpclient/commons-httpclient/3.0.1/commons-httpclient-3.0.1.jar:/Users/jhyde/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar:/Users/jhyde/.m2/repository/org/antlr/ST4/4.0.4/ST4-4.0.4.jar:/Users/jhyde/.m2/repository/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/Users/jhyde/.m2/repository/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/Users/jhyde/.m2/repository/org/apache/curator/curator-framework/2.12.0/curator-framework-2.12.0.jar:/Users/jhyde/.m2/repository/org/codehaus/groovy/groovy-all/2.4.11/groovy-all-2.4.11.jar:/Users/jhyde/.m2/repository/org/apache/calcite/calcite-core/1.15.0/calcite-core-1.15.0.jar:/Users/jhyde/.m2/repository/org/apache/calcite/calcite-linq4j/1.15.0/calcite-linq4j-1.15.0.jar:/Users/jhyde/.m2/repository/com/esri/geometry/esri-geometry-api/2.0.0/esri-geometry-api-2.0.0.jar:/Users/jhyde/.m2/repository/com/yahoo/datasketches/sketches-core/0.9.0/sketches-core-0.9.0.jar:/Users/jhyde/.m2/repository/com/yahoo/datasketches/memory/0.9.0/memory-0.9.0.jar:/Users/jhyde/.m2/repository/org/codehaus/janino/janino/2.7.6/janino-2.7.6.jar:/Users/jhyde/.m2/repository/org/codehaus/janino/commons-compiler/2.7.6/commons-compiler-2.7.6.jar:/Users/jhyde/.m2/repository/org/apache/calcite/calcite-druid/1.15.0/calcite-druid-1.15.0.jar:/Users/jhyde/.m2/repository/org/apache/calcite/avatica/avatica/1.10.0/avatica-1.10.0.jar:/Users/jhyde/.m2/repository/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/Users/jhyde/.m2/repository/stax/stax-api/1.0.1/stax-api-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-registry/3.0.0-beta1/hadoop-yarn-registry-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-druid-handler/3.0.0-SNAPSHOT/hive-druid-handler-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/com/metamx/java-util/1.3.2/java-util-1.3.2.jar:/Users/jhyde/.m2/repository/org/skife/config/config-magic/0.17/config-magic-0.17.jar:/Users/jhyde/.m2/repository/org/mozilla/rhino/1.7R5/rhino-1.7R5.jar:/Users/jhyde/.m2/repository/com/jayway/jsonpath/json-path/2.1.0/json-path-2.1.0.jar:/Users/jhyde/.m2/repository/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/Users/jhyde/.m2/repository/org/asynchttpclient/async-http-client/2.0.37/async-http-client-2.0.37.jar:/Users/jhyde/.m2/repository/org/asynchttpclient/async-http-client-netty-utils/2.0.37/async-http-client-netty-utils-2.0.37.jar:/Users/jhyde/.m2/repository/io/netty/netty-buffer/4.0.52.Final/netty-buffer-4.0.52.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-codec-http/4.0.52.Final/netty-codec-http-4.0.52.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-codec/4.0.52.Final/netty-codec-4.0.52.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-handler/4.0.52.Final/netty-handler-4.0.52.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-transport/4.0.52.Final/netty-transport-4.0.52.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-transport-native-epoll/4.0.52.Final/netty-transport-native-epoll-4.0.52.Final-linux-x86_64.jar:/Users/jhyde/.m2/repository/io/netty/netty-common/4.0.52.Final/netty-common-4.0.52.Final.jar:/Users/jhyde/.m2/repository/org/asynchttpclient/netty-resolver-dns/2.0.37/netty-resolver-dns-2.0.37.jar:/Users/jhyde/.m2/repository/org/asynchttpclient/netty-resolver/2.0.37/netty-resolver-2.0.37.jar:/Users/jhyde/.m2/repository/org/asynchttpclient/netty-codec-dns/2.0.37/netty-codec-dns-2.0.37.jar:/Users/jhyde/.m2/repository/org/reactivestreams/reactive-streams/1.0.0/reactive-streams-1.0.0.jar:/Users/jhyde/.m2/repository/com/typesafe/netty/netty-reactive-streams/1.0.8/netty-reactive-streams-1.0.8.jar:/Users/jhyde/.m2/repository/org/gridkit/lab/jvm-attach-api/1.2/jvm-attach-api-1.2.jar:/Users/jhyde/.m2/repository/io/druid/druid-server/0.11.0/druid-server-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-console/0.0.2/druid-console-0.0.2.jar:/Users/jhyde/.m2/repository/javax/inject/javax.inject/1/javax.inject-1.jar:/Users/jhyde/.m2/repository/org/glassfish/javax.el/3.0.0/javax.el-3.0.0.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/jaxrs/jackson-jaxrs-smile-provider/2.4.6/jackson-jaxrs-smile-provider-2.4.6.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.4.6/jackson-dataformat-smile-2.4.6.jar:/Users/jhyde/.m2/repository/io/tesla/aether/tesla-aether/0.0.5/tesla-aether-0.0.5.jar:/Users/jhyde/.m2/repository/org/eclipse/aether/aether-spi/0.9.0.M2/aether-spi-0.9.0.M2.jar:/Users/jhyde/.m2/repository/org/eclipse/aether/aether-util/0.9.0.M2/aether-util-0.9.0.M2.jar:/Users/jhyde/.m2/repository/org/eclipse/aether/aether-impl/0.9.0.M2/aether-impl-0.9.0.M2.jar:/Users/jhyde/.m2/repository/org/eclipse/aether/aether-connector-file/0.9.0.M2/aether-connector-file-0.9.0.M2.jar:/Users/jhyde/.m2/repository/io/tesla/aether/aether-connector-okhttp/0.0.9/aether-connector-okhttp-0.0.9.jar:/Users/jhyde/.m2/repository/org/apache/maven/wagon/wagon-provider-api/2.4/wagon-provider-api-2.4.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-aether-provider/3.1.1/maven-aether-provider-3.1.1.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-model/3.1.1/maven-model-3.1.1.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-model-builder/3.1.1/maven-model-builder-3.1.1.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-repository-metadata/3.1.1/maven-repository-metadata-3.1.1.jar:/Users/jhyde/.m2/repository/org/codehaus/plexus/plexus-utils/3.0.15/plexus-utils-3.0.15.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-settings-builder/3.1.1/maven-settings-builder-3.1.1.jar:/Users/jhyde/.m2/repository/org/codehaus/plexus/plexus-interpolation/1.19/plexus-interpolation-1.19.jar:/Users/jhyde/.m2/repository/org/apache/maven/maven-settings/3.1.1/maven-settings-3.1.1.jar:/Users/jhyde/.m2/repository/org/eclipse/aether/aether-api/0.9.0.M2/aether-api-0.9.0.M2.jar:/Users/jhyde/.m2/repository/net/spy/spymemcached/2.12.3/spymemcached-2.12.3.jar:/Users/jhyde/.m2/repository/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/Users/jhyde/.m2/repository/com/ircclouds/irc/irc-api/1.0-0014/irc-api-1.0-0014.jar:/Users/jhyde/.m2/repository/com/maxmind/geoip2/geoip2/0.4.0/geoip2-0.4.0.jar:/Users/jhyde/.m2/repository/com/maxmind/maxminddb/maxminddb/0.2.0/maxminddb-0.2.0.jar:/Users/jhyde/.m2/repository/org/apache/derby/derbynet/10.11.1.1/derbynet-10.11.1.1.jar:/Users/jhyde/.m2/repository/org/apache/derby/derbyclient/10.11.1.1/derbyclient-10.11.1.1.jar:/Users/jhyde/.m2/repository/it/unimi/dsi/fastutil/7.2.0/fastutil-7.2.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-processing/0.11.0/druid-processing-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-common/0.11.0/druid-common-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/java-util/0.11.0/java-util-0.11.0.jar:/Users/jhyde/.m2/repository/net/thisptr/jackson-jq/0.0.7/jackson-jq-0.0.7.jar:/Users/jhyde/.m2/repository/io/druid/druid-api/0.11.0/druid-api-0.11.0.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-dbcp2/2.0.1/commons-dbcp2-2.0.1.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-pool2/2.2/commons-pool2-2.2.jar:/Users/jhyde/.m2/repository/org/hibernate/hibernate-validator/5.1.3.Final/hibernate-validator-5.1.3.Final.jar:/Users/jhyde/.m2/repository/org/jboss/logging/jboss-logging/3.1.3.GA/jboss-logging-3.1.3.GA.jar:/Users/jhyde/.m2/repository/com/fasterxml/classmate/1.0.0/classmate-1.0.0.jar:/Users/jhyde/.m2/repository/javax/el/javax.el-api/3.0.0/javax.el-api-3.0.0.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-guava/2.4.6/jackson-datatype-guava-2.4.6.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/datatype/jackson-datatype-joda/2.4.6/jackson-datatype-joda-2.4.6.jar:/Users/jhyde/.m2/repository/com/google/inject/extensions/guice-multibindings/4.1.0/guice-multibindings-4.1.0.jar:/Users/jhyde/.m2/repository/org/jdbi/jdbi/2.63.1/jdbi-2.63.1.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-jul/2.5/log4j-jul-2.5.jar:/Users/jhyde/.m2/repository/org/slf4j/jcl-over-slf4j/1.7.12/jcl-over-slf4j-1.7.12.jar:/Users/jhyde/.m2/repository/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar:/Users/jhyde/.m2/repository/javax/activation/activation/1.1.1/activation-1.1.1.jar:/Users/jhyde/.m2/repository/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar:/Users/jhyde/.m2/repository/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar:/Users/jhyde/.m2/repository/net/iharder/base64/2.3.8/base64-2.3.8.jar:/Users/jhyde/.m2/repository/org/antlr/antlr4-runtime/4.5.1/antlr4-runtime-4.5.1.jar:/Users/jhyde/.m2/repository/io/druid/druid-hll/0.11.0/druid-hll-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/bytebuffer-collections/0.11.0/bytebuffer-collections-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/extendedset/0.11.0/extendedset-0.11.0.jar:/Users/jhyde/.m2/repository/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/Users/jhyde/.m2/repository/com/ibm/icu/icu4j/54.1.1/icu4j-54.1.1.jar:/Users/jhyde/.m2/repository/org/mapdb/mapdb/1.0.8/mapdb-1.0.8.jar:/Users/jhyde/.m2/repository/org/ow2/asm/asm/5.2/asm-5.2.jar:/Users/jhyde/.m2/repository/org/ow2/asm/asm-commons/5.2/asm-commons-5.2.jar:/Users/jhyde/.m2/repository/org/ow2/asm/asm-tree/5.2/asm-tree-5.2.jar:/Users/jhyde/.m2/repository/io/druid/extensions/druid-hdfs-storage/0.11.0/druid-hdfs-storage-0.11.0.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-aws/2.7.3/hadoop-aws-2.7.3.jar:/Users/jhyde/.m2/repository/com/amazonaws/aws-java-sdk-s3/1.10.77/aws-java-sdk-s3-1.10.77.jar:/Users/jhyde/.m2/repository/com/amazonaws/aws-java-sdk-kms/1.10.77/aws-java-sdk-kms-1.10.77.jar:/Users/jhyde/.m2/repository/com/amazonaws/aws-java-sdk-core/1.10.77/aws-java-sdk-core-1.10.77.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.5.3/jackson-dataformat-cbor-2.5.3.jar:/Users/jhyde/.m2/repository/io/druid/extensions/mysql-metadata-storage/0.11.0/mysql-metadata-storage-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/extensions/postgresql-metadata-storage/0.11.0/postgresql-metadata-storage-0.11.0.jar:/Users/jhyde/.m2/repository/org/postgresql/postgresql/9.4.1208.jre7/postgresql-9.4.1208.jre7.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-druid-handler/3.0.0-SNAPSHOT/hive-druid-handler-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-jdbc-handler/3.0.0-SNAPSHOT/hive-jdbc-handler-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/junit/junit/4.11/junit-4.11.jar:/Users/jhyde/.m2/repository/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/Users/jhyde/.m2/repository/com/sun/jersey/jersey-servlet/1.19/jersey-servlet-1.19.jar:/Users/jhyde/.m2/repository/com/sun/jersey/jersey-server/1.19/jersey-server-1.19.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-archives/3.0.0-beta1/hadoop-archives-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-common/3.0.0-beta1/hadoop-common-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-annotations/3.0.0-beta1/hadoop-annotations-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar:/Users/jhyde/.m2/repository/commons-net/commons-net/3.1/commons-net-3.1.jar:/Users/jhyde/.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-util/9.3.19.v20170502/jetty-util-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/Users/jhyde/.m2/repository/com/sun/jersey/jersey-core/1.19/jersey-core-1.19.jar:/Users/jhyde/.m2/repository/javax/ws/rs/jsr311-api/1.1.1/jsr311-api-1.1.1.jar:/Users/jhyde/.m2/repository/com/sun/jersey/jersey-json/1.19/jersey-json-1.19.jar:/Users/jhyde/.m2/repository/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar:/Users/jhyde/.m2/repository/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar:/Users/jhyde/.m2/repository/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar:/Users/jhyde/.m2/repository/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar:/Users/jhyde/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar:/Users/jhyde/.m2/repository/commons-beanutils/commons-beanutils/1.9.3/commons-beanutils-1.9.3.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-configuration2/2.1.1/commons-configuration2-2.1.1.jar:/Users/jhyde/.m2/repository/com/google/re2j/re2j/1.1/re2j-1.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-auth/3.0.0-beta1/hadoop-auth-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/com/nimbusds/nimbus-jose-jwt/4.41.1/nimbus-jose-jwt-4.41.1.jar:/Users/jhyde/.m2/repository/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar:/Users/jhyde/.m2/repository/net/minidev/json-smart/2.3/json-smart-2.3.jar:/Users/jhyde/.m2/repository/net/minidev/accessors-smart/1.2/accessors-smart-1.2.jar:/Users/jhyde/.m2/repository/com/jcraft/jsch/0.1.54/jsch-0.1.54.jar:/Users/jhyde/.m2/repository/org/apache/curator/curator-client/2.12.0/curator-client-2.12.0.jar:/Users/jhyde/.m2/repository/org/apache/curator/curator-recipes/2.12.0/curator-recipes-2.12.0.jar:/Users/jhyde/.m2/repository/org/apache/htrace/htrace-core4/4.1.0-incubating/htrace-core4-4.1.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-simplekdc/1.0.1/kerb-simplekdc-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-client/1.0.1/kerb-client-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerby-config/1.0.1/kerby-config-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-core/1.0.1/kerb-core-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerby-pkix/1.0.1/kerby-pkix-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerby-asn1/1.0.1/kerby-asn1-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerby-util/1.0.1/kerby-util-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-common/1.0.1/kerb-common-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-crypto/1.0.1/kerb-crypto-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-util/1.0.1/kerb-util-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/token-provider/1.0.1/token-provider-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-admin/1.0.1/kerb-admin-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-server/1.0.1/kerb-server-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerb-identity/1.0.1/kerb-identity-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/kerby/kerby-xdr/1.0.1/kerby-xdr-1.0.1.jar:/Users/jhyde/.m2/repository/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar:/Users/jhyde/.m2/repository/com/fasterxml/woodstox/woodstox-core/5.0.3/woodstox-core-5.0.3.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-common/3.0.0-beta1/hadoop-common-3.0.0-beta1-tests.jar:/Users/jhyde/.m2/repository/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-distcp/3.0.0-beta1/hadoop-distcp-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-hdfs/3.0.0-beta1/hadoop-hdfs-3.0.0-beta1-tests.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-util-ajax/9.3.19.v20170502/jetty-util-ajax-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/commons-daemon/commons-daemon/1.0.13/commons-daemon-1.0.13.jar:/Users/jhyde/.m2/repository/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:/Users/jhyde/.m2/repository/io/netty/netty-all/4.0.52.Final/netty-all-4.0.52.Final.jar:/Users/jhyde/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-hdfs/3.0.0-beta1/hadoop-hdfs-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.0.0-beta1/hadoop-mapreduce-client-jobclient-3.0.0-beta1-tests.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-common/3.0.0-beta1/hadoop-mapreduce-client-common-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/com/google/inject/extensions/guice-servlet/4.0/guice-servlet-4.0.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-hs/3.0.0-beta1/hadoop-mapreduce-client-hs-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-hdfs-client/3.0.0-beta1/hadoop-hdfs-client-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/com/squareup/okhttp/okhttp/2.4.0/okhttp-2.4.0.jar:/Users/jhyde/.m2/repository/com/squareup/okio/okio/1.4.0/okio-1.4.0.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-app/3.0.0-beta1/hadoop-mapreduce-client-app-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-shuffle/3.0.0-beta1/hadoop-mapreduce-client-shuffle-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-core/3.0.0-beta1/hadoop-mapreduce-client-core-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-server/3.0.0-SNAPSHOT/hive-llap-server-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-common/3.0.0-SNAPSHOT/hive-llap-common-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-client/3.0.0-SNAPSHOT/hive-llap-client-3.0.0-SNAPSHOT.jar:/Users/jhyde/.m2/repository/org/apache/slider/slider-core/0.92.0-incubating/slider-core-0.92.0-incubating.jar:/Users/jhyde/.m2/repository/org/yaml/snakeyaml/1.16/snakeyaml-1.16.jar:/Users/jhyde/.m2/repository/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-common/3.0.0-SNAPSHOT/hive-llap-common-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hive/hive-llap-server/3.0.0-SNAPSHOT/hive-llap-server-3.0.0-SNAPSHOT-tests.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-tests/3.0.0-beta1/hadoop-yarn-server-tests-3.0.0-beta1-tests.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-common/3.0.0-beta1/hadoop-yarn-server-common-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/geronimo/specs/geronimo-jcache_1.0_spec/1.0-alpha-1/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/Users/jhyde/.m2/repository/org/ehcache/ehcache/3.3.1/ehcache-3.3.1.jar:/Users/jhyde/.m2/repository/com/zaxxer/HikariCP-java7/2.4.12/HikariCP-java7-2.4.12.jar:/Users/jhyde/.m2/repository/com/microsoft/sqlserver/mssql-jdbc/6.2.1.jre7/mssql-jdbc-6.2.1.jre7.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-nodemanager/3.0.0-beta1/hadoop-yarn-server-nodemanager-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/com/codahale/metrics/metrics-core/3.0.1/metrics-core-3.0.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-resourcemanager/3.0.0-beta1/hadoop-yarn-server-resourcemanager-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/3.0.0-beta1/hadoop-yarn-server-applicationhistoryservice-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/de/ruedigermoeller/fst/2.50/fst-2.50.jar:/Users/jhyde/.m2/repository/com/cedarsoftware/java-util/1.9.0/java-util-1.9.0.jar:/Users/jhyde/.m2/repository/com/cedarsoftware/json-io/2.5.1/json-io-2.5.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-timelineservice/3.0.0-beta1/hadoop-yarn-server-timelineservice-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-csv/1.0/commons-csv-1.0.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-client/3.0.0-beta1/hadoop-yarn-client-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-common/3.0.0-beta1/hadoop-yarn-common-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/javax/xml/bind/jaxb-api/2.2.11/jaxb-api-2.2.11.jar:/Users/jhyde/.m2/repository/com/sun/jersey/jersey-client/1.19/jersey-client-1.19.jar:/Users/jhyde/.m2/repository/com/google/inject/guice/4.0/guice-4.0.jar:/Users/jhyde/.m2/repository/aopalliance/aopalliance/1.0/aopalliance-1.0.jar:/Users/jhyde/.m2/repository/com/sun/jersey/contribs/jersey-guice/1.19/jersey-guice-1.19.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.7.8/jackson-core-2.7.8.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/module/jackson-module-jaxb-annotations/2.7.8/jackson-module-jaxb-annotations-2.7.8.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/jaxrs/jackson-jaxrs-json-provider/2.7.8/jackson-jaxrs-json-provider-2.7.8.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/jaxrs/jackson-jaxrs-base/2.7.8/jackson-jaxrs-base-2.7.8.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-api/3.0.0-beta1/hadoop-yarn-api-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.7.8/jackson-annotations-2.7.8.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/thirdparty/hbase-shaded-miscellaneous/1.0.1/hbase-shaded-miscellaneous-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar:/Users/jhyde/.m2/repository/org/apache/htrace/htrace-core/3.2.0-incubating/htrace-core-3.2.0-incubating.jar:/Users/jhyde/.m2/repository/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/Users/jhyde/.m2/repository/com/github/stephenc/findbugs/findbugs-annotations/1.3.9-1/findbugs-annotations-1.3.9-1.jar:/Users/jhyde/.m2/repository/org/apache/yetus/audience-annotations/0.5.0/audience-annotations-0.5.0.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-common/2.0.0-alpha4/hbase-common-2.0.0-alpha4-tests.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-metrics-api/2.0.0-alpha4/hbase-metrics-api-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-hadoop-compat/2.0.0-alpha4/hbase-hadoop-compat-2.0.0-alpha4-tests.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-metrics/2.0.0-alpha4/hbase-metrics-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-hadoop2-compat/2.0.0-alpha4/hbase-hadoop2-compat-2.0.0-alpha4-tests.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/thirdparty/hbase-shaded-protobuf/1.0.1/hbase-shaded-protobuf-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/hbase/thirdparty/hbase-shaded-netty/1.0.1/hbase-shaded-netty-1.0.1.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-http/2.0.0-alpha4/hbase-http-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/core/jersey-server/2.25.1/jersey-server-2.25.1.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/core/jersey-common/2.25.1/jersey-common-2.25.1.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.25.1/jersey-guava-2.25.1.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/core/jersey-client/2.25.1/jersey-client-2.25.1.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/media/jersey-media-jaxb/2.25.1/jersey-media-jaxb-2.25.1.jar:/Users/jhyde/.m2/repository/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/hk2-api/2.5.0-b32/hk2-api-2.5.0-b32.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/hk2-utils/2.5.0-b32/hk2-utils-2.5.0-b32.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/external/aopalliance-repackaged/2.5.0-b32/aopalliance-repackaged-2.5.0-b32.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/external/javax.inject/2.5.0-b32/javax.inject-2.5.0-b32.jar:/Users/jhyde/.m2/repository/org/glassfish/hk2/hk2-locator/2.5.0-b32/hk2-locator-2.5.0-b32.jar:/Users/jhyde/.m2/repository/org/javassist/javassist/3.20.0-GA/javassist-3.20.0-GA.jar:/Users/jhyde/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet-core/2.25.1/jersey-container-servlet-core-2.25.1.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-protocol/2.0.0-alpha4/hbase-protocol-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-protocol-shaded/2.0.0-alpha4/hbase-protocol-shaded-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-procedure/2.0.0-alpha4/hbase-procedure-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-replication/2.0.0-alpha4/hbase-replication-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-prefix-tree/2.0.0-alpha4/hbase-prefix-tree-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/glassfish/web/javax.servlet.jsp/2.3.2/javax.servlet.jsp-2.3.2.jar:/Users/jhyde/.m2/repository/org/jamon/jamon-runtime/2.3.1/jamon-runtime-2.3.1.jar:/Users/jhyde/.m2/repository/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/Users/jhyde/.m2/repository/com/lmax/disruptor/3.3.6/disruptor-3.3.6.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-client/3.0.0-beta1/hadoop-client-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-jobclient/3.0.0-beta1/hadoop-mapreduce-client-jobclient-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-server/2.0.0-alpha4/hbase-server-2.0.0-alpha4-tests.jar:/Users/jhyde/.m2/repository/org/apache/hbase/hbase-mapreduce/2.0.0-alpha4/hbase-mapreduce-2.0.0-alpha4.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-tests/0.9.1/tez-tests-0.9.1-tests.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-common/0.9.1/tez-common-0.9.1.jar:/Users/jhyde/.m2/repository/org/slf4j/slf4j-log4j12/1.7.10/slf4j-log4j12-1.7.10.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-examples/0.9.1/tez-examples-0.9.1.jar:/Library/Java/JavaVirtualMachines/jdk1.8.0.jdk/Contents/Home/lib/tools.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-api/0.9.1/tez-api-0.9.1.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-runtime-library/0.9.1/tez-runtime-library-0.9.1.jar:/Users/jhyde/.m2/repository/org/roaringbitmap/RoaringBitmap/0.4.9/RoaringBitmap-0.4.9.jar:/Users/jhyde/.m2/repository/com/ning/async-http-client/1.8.16/async-http-client-1.8.16.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-mapreduce/0.9.1/tez-mapreduce-0.9.1.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-dag/0.9.1/tez-dag-0.9.1.jar:/Users/jhyde/.m2/repository/org/apache/tez/hadoop-shim/0.9.1/hadoop-shim-0.9.1.jar:/Users/jhyde/.m2/repository/org/apache/tez/tez-runtime-internals/0.9.1/tez-runtime-internals-0.9.1.jar:/Users/jhyde/.m2/repository/org/apache/hadoop/hadoop-yarn-server-web-proxy/3.0.0-beta1/hadoop-yarn-server-web-proxy-3.0.0-beta1.jar:/Users/jhyde/.m2/repository/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar:/Users/jhyde/.m2/repository/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar:/Users/jhyde/.m2/repository/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/Users/jhyde/.m2/repository/javax/servlet/servlet-api/2.5/servlet-api-2.5.jar:/Users/jhyde/open1/hive/itests/qtest-druid/target/classes:/Users/jhyde/.m2/repository/io/druid/druid-services/0.11.0/druid-services-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-indexing-hadoop/0.11.0/druid-indexing-hadoop-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-indexing-service/0.11.0/druid-indexing-service-0.11.0.jar:/Users/jhyde/.m2/repository/io/druid/druid-sql/0.11.0/druid-sql-0.11.0.jar:/Users/jhyde/.m2/repository/org/apache/calcite/avatica/avatica-core/1.9.0/avatica-core-1.9.0.jar:/Users/jhyde/.m2/repository/org/apache/calcite/avatica/avatica-metrics/1.9.0/avatica-metrics-1.9.0.jar:/Users/jhyde/.m2/repository/org/apache/calcite/avatica/avatica-server/1.9.0/avatica-server-1.9.0.jar:/Users/jhyde/.m2/repository/io/airlift/airline/0.7/airline-0.7.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-api/2.10.0/log4j-api-2.10.0.jar:/Users/jhyde/.m2/repository/org/apache/logging/log4j/log4j-core/2.10.0/log4j-core-2.10.0.jar:/Users/jhyde/.m2/repository/org/apache/curator/curator-x-discovery/4.0.0/curator-x-discovery-4.0.0.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-servlets/9.3.19.v20170502/jetty-servlets-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-continuation/9.3.19.v20170502/jetty-continuation-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-proxy/9.3.19.v20170502/jetty-proxy-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/org/eclipse/jetty/jetty-security/9.3.19.v20170502/jetty-security-9.3.19.v20170502.jar:/Users/jhyde/.m2/repository/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar:/Users/jhyde/.m2/repository/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar
+[INFO] Executed tasks
+[INFO]
+[INFO] --- build-helper-maven-plugin:1.8:add-test-source (add-test-sources) @ hive-it-qfile ---
+[INFO] Test Source directory: /Users/jhyde/open1/hive/itests/qtest/target/generated-test-sources/java added.
+[INFO]
+[INFO] --- maven-resources-plugin:2.7:testResources (default-testResources) @ hive-it-qfile ---
+[INFO] Using 'UTF-8' encoding to copy filtered resources.
+[INFO] skip non existing resourceDirectory /Users/jhyde/open1/hive/itests/qtest/src/test/resources
+[INFO] Copying 3 resources
+[INFO]
+[INFO] --- maven-antrun-plugin:1.7:run (setup-test-dirs) @ hive-it-qfile ---
+[INFO] Executing tasks
+
+main:
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/qtest/target/tmp
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/qtest/target/testconf
+ [delete] Deleting directory /Users/jhyde/open1/hive/itests/qtest/target/warehouse
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/qtest/target/tmp
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/qtest/target/warehouse
+ [mkdir] Created dir: /Users/jhyde/open1/hive/itests/qtest/target/testconf
+ [copy] Copying 19 files to /Users/jhyde/open1/hive/itests/qtest/target/testconf
+[INFO] Executed tasks
+[INFO]
+[INFO] --- maven-compiler-plugin:3.6.1:testCompile (default-testCompile) @ hive-it-qfile ---
+[INFO] Nothing to compile - all classes are up to date
+[INFO]
+[INFO] --- maven-surefire-plugin:2.20.1:test (default-test) @ hive-it-qfile ---
+[INFO]
+[INFO] -------------------------------------------------------
+[INFO] T E S T S
+[INFO] -------------------------------------------------------
+[INFO] Running org.apache.hadoop.hive.cli.TestCliDriver
+[ERROR] Tests run: 2254, Failures: 25, Errors: 0, Skipped: 0, Time elapsed: 23,630.246 s <<< FAILURE! - in org.apache.hadoop.hive.cli.TestCliDriver
+[ERROR] testCliDriver[acid_table_stats](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 27.601 s <<< FAILURE!
+java.lang.AssertionError:
+Client Execution succeeded but contained differences (error code = 1) after executing acid_table_stats.q
+98c98
+< totalSize 3978
+---
+> totalSize 3950
+136c136
+< Statistics: Num rows: 82 Data size: 39780 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 81 Data size: 39500 Basic stats: COMPLETE Column stats: NONE
+138c138
+< Statistics: Num rows: 82 Data size: 39780 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 81 Data size: 39500 Basic stats: COMPLETE Column stats: NONE
+213c213
+< totalSize 3978
+---
+> totalSize 3950
+264c264
+< totalSize 3978
+---
+> totalSize 3950
+389c389
+< totalSize 7958
+---
+> totalSize 7904
+436c436
+< totalSize 7958
+---
+> totalSize 7904
+
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[autoColumnStats_4](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 8.289 s <<< FAILURE!
+java.lang.AssertionError:
+Client Execution succeeded but contained differences (error code = 1) after executing autoColumnStats_4.q
+200c200
+< totalSize 1852
+---
+> totalSize 1798
+244c244
+< totalSize 3036
+---
+> totalSize 2909
+
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[auto_sortmerge_join_2](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 7.907 s <<< FAILURE!
+java.lang.AssertionError:
+Client Execution succeeded but contained differences (error code = 1) after executing auto_sortmerge_join_2.q
+1101,1103d1100
+< Hive Runtime Error: Map local work exhausted memory
+< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
+< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
+
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[basicstat_partval](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 4.105 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 1 running "
+insert into p1 partition(p='A') values (2),(3)" fname=basicstat_partval.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[compustat_avro](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.326 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 1 running "
+
+dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/" fname=compustat_avro.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at
org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[dbtxnmgr_showlocks](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 5.17 s <<< FAILURE! +java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing dbtxnmgr_showlocks.q +73c73 +< 5 default partitioned_acid_table p=abc MINOR initiated --- --- --- --- +--- +> 1 default partitioned_acid_table p=abc MINOR initiated --- --- --- --- + + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at 
org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[decimal_3](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 5.468 s <<< FAILURE! +java.lang.AssertionError: +Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key" fname=decimal_3.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at 
org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[decimal_5](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 2.282 s <<< FAILURE! +java.lang.AssertionError: +Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key" fname=decimal_5.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at 
org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[groupby_duplicate_key](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 4.387 s <<< FAILURE! +java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing groupby_duplicate_key.q +8c8,9 +< Stage-0 is a root stage +--- +> Stage-1 is a root stage +> Stage-0 depends on stages: Stage-1 +10a12,50 +> Stage: Stage-1 +> Map Reduce +> Map Operator Tree: +> TableScan +> alias: src +> Row Limit Per Split: 10 +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Select Operator +> expressions: key (type: string) +> outputColumnNames: key +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Group By Operator +> keys: key (type: string), '' (type: string), '' (type: string) +> mode: hash +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Reduce Output Operator +> key expressions: _col0 (type: string), '' (type: string) +> sort order: ++ +> Map-reduce partition columns: _col0 (type: string), '' (type: string) +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Reduce Operator Tree: +> Group By Operator +> keys: KEY._col0 (type: string), '' (type: string), '' (type: string) +> mode: mergepartial +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> Select Operator +> expressions: _col0 (type: string), '' (type: string), '' (type: string) +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> File Output Operator +> compressed: false +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> table: +> +Output was too long and had to be truncated... 
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[having2](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 1.013 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 10025 running "
+
+explain
+SELECT distinct s1.customer_name as x, s1.customer_name as y
+FROM default.testv1_staples s1 join default.src s2 on s1.customer_name = s2.key
+HAVING (
+(SUM(s1.customer_balance) <= 4074689.000000041)
+AND (AVG(s1.discount) <= 822)
+AND (COUNT(s2.value) > 4)
+)" fname=having2.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mapjoin_distinct](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.725 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+
+FROM srcpart c
+JOIN srcpart d
+ON ( c.key=d.key AND c.ds='2008-04-08' AND d.ds='2008-04-08')
+SELECT /*+ MAPJOIN(d) */ DISTINCT c.value as value order by value limit 10" fname=mapjoin_distinct.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mapjoin_hook](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 16.876 s <<< FAILURE!
+java.lang.AssertionError:
+Client Execution succeeded but contained differences (error code = 1) after executing mapjoin_hook.q
+43,46c43
+< Hive Runtime Error: Map local work exhausted memory
+< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
+< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
+< [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 1 BACKUP_COMMON_JOIN: 1
+---
+> [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 1 CONVERTED_MAPJOIN_LOCAL: 1 BACKUP_COMMON_JOIN: 0
+49c46
+< RUN: Stage-1:MAPRED
+---
+> RUN: Stage-5:MAPRED
+61,64c58
+< Hive Runtime Error: Map local work exhausted memory
+< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask
+< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
+< [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 2 BACKUP_COMMON_JOIN: 2
+---
+> [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 1 CONVERTED_MAPJOIN_LOCAL: 2 BACKUP_COMMON_JOIN: 1
+70c64
+< RUN: Stage-2:MAPRED
+---
+> RUN: Stage-7:MAPRED
+
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mm_all](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.538 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_all.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mm_buckets](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.641 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_buckets.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mm_conversions](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.63 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 1" fname=mm_conversions.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[mm_cttas](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.653 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_cttas.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[parquet_join](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 0.444 s <<< FAILURE!
+java.lang.AssertionError:
+Client execution failed with error code = 40000 running "
+insert into table staging select distinct key, value from src order by key limit 2" fname=parquet_join.q
+See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[row__id](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 10.649 s <<< FAILURE!
+java.lang.AssertionError:
+Client Execution succeeded but contained differences (error code = 1) after executing row__id.q
+65c65
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+69c69
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+73c73
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+78c78
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+81c81
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+107,109c107,109
+< 709
+< 710
+< 711
+---
+> 3
+> 4
+> 5
+126c126
+< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE
+129c129
+< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE
+133c133
+< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE
+136c136
+< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE
+---
+> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE
+161a162
+> 3
+
+ at org.junit.Assert.fail(Assert.java:88)
+ at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278)
+ at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183)
+ at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
+ at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
+ at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:483)
+ at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
+ at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
+ at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
+ at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
+ at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.junit.runners.Suite.runChild(Suite.java:127)
+ at org.junit.runners.Suite.runChild(Suite.java:26)
+ at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
+ at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
+ at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
+ at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
+ at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
+ at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
+ at org.junit.rules.RunRules.evaluate(RunRules.java:20)
+ at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239)
+ at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160)
+ at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373)
+ at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334)
+ at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119)
+ at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407)
+
+[ERROR] testCliDriver[udaf_context_ngrams](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 8.868 s <<< FAILURE!
+java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing udaf_context_ngrams.q +34c34 +< [{"ngram":["was"],"estfrequency":3.0},{"ngram":["let"],"estfrequency":1.0},{"ngram":["would"],"estfrequency":1.0}] +--- +> [{"ngram":["was"],"estfrequency":17.0},{"ngram":["had"],"estfrequency":16.0},{"ngram":["thought"],"estfrequency":13.0},{"ngram":["could"],"estfrequency":9.0},{"ngram":["would"],"estfrequency":7.0},{"ngram":["lay"],"estfrequency":5.0},{"ngram":["did"],"estfrequency":4.0},{"ngram":["felt"],"estfrequency":4.0},{"ngram":["looked"],"estfrequency":4.0},{"ngram":["s"],"estfrequency":4.0},{"ngram":["wanted"],"estfrequency":4.0},{"ngram":["finally"],"estfrequency":3.0},{"ngram":["lifted"],"estfrequency":3.0},{"ngram":["must"],"estfrequency":3.0},{"ngram":["needed"],"estfrequency":3.0},{"ngram":["slid"],"estfrequency":3.0},{"ngram":["told"],"estfrequency":3.0},{"ngram":["tried"],"estfrequency":3.0},{"ngram":["also"],"estfrequency":2.0},{"ngram":["always"],"estfrequency":2.0},{"ngram":["began"],"estfrequency":2.0},{"ngram":["didn't"],"estfrequency":2.0},{"ngram":["do"],"estfrequency":2.0},{"ngram":["drew"],"estfrequency":2.0},{"ngram":["found"],"estfrequency":2.0},{"ngram":["is"],"estfrequency":2.0},{"ngram":["let"],"estfrequency":2.0},{"ngram":["made"],"estfrequency":2.0},{"ngram":["really"],"estfrequency":2.0},{"ngram":["reported"],"estfrequency":2.0},{"ngram":["threw"],"estfrequency":2.0},{"ngram":["touched"],"estfrequency":2.0},{"ngram":["wouldn't"],"estfrequency":2.0},{"ngram":["allowed"],"estfrequency":1.0},{"ngram":["almost"],"estfrequency":1.0},{"ngram":["became"],"estfrequency":1.0},{"ngram":["called"],"estfrequency":1.0},{"ngram":["caught"],"estfrequency":1.0},{"ngram":["chose"],"estfrequency":1.0},{"ngram":["confined"],"estfrequency":1.0},{"ngram":["cut"],"estfrequency":1.0},{"ngram":["denied"],"estfrequency":1.0},{"ngram":["directed"],"estfrequency":1.0},{"ngram":["discovered"],"estfrequency":1.0},{"ngram":["failed"],"estfrequency":1.0},{"ngram":["have"],"estfrequency":1.0},{"ngram":["heard"],"estfrequency":1.0},{"ngram":["hit"],"estfrequ +Output was too long and had to be truncated... 
+ at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[udaf_corr](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 10.528 s <<< FAILURE! 
+java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing udaf_corr.q +100c100 +< 0.6633880657639324 +--- +> 0.6633880657639326 + + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[udaf_histogram_numeric](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 13.42 s <<< FAILURE! 
+java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing udaf_histogram_numeric.q +27c27 +< [{"x":14.17857142857143,"y":28.0},{"x":41.588235294117645,"y":17.0},{"x":69.3,"y":20.0},{"x":93.50000000000001,"y":26.0},{"x":122.99999999999999,"y":24.0},{"x":144.44444444444446,"y":18.0},{"x":169.50000000000006,"y":34.0},{"x":197.51351351351352,"y":37.0},{"x":226.02857142857147,"y":35.0},{"x":254.15,"y":20.0},{"x":280.51724137931024,"y":29.0},{"x":309.47999999999996,"y":25.0},{"x":328.27777777777777,"y":18.0},{"x":348.4444444444444,"y":18.0},{"x":374.3809523809523,"y":21.0},{"x":401.9189189189191,"y":37.0},{"x":427.76,"y":25.0},{"x":453.47826086956513,"y":23.0},{"x":472.9615384615384,"y":26.0},{"x":491.89473684210526,"y":19.0}] +--- +> [{"x":9.761904761904763,"y":21.0},{"x":33.84210526315789,"y":19.0},{"x":62.75000000000001,"y":20.0},{"x":90.90322580645162,"y":31.0},{"x":122.91666666666667,"y":24.0},{"x":146.33333333333334,"y":21.0},{"x":170.70967741935485,"y":31.0},{"x":194.3571428571428,"y":28.0},{"x":214.84615384615384,"y":26.0},{"x":235.08695652173907,"y":23.0},{"x":257.80000000000007,"y":15.0},{"x":281.0333333333333,"y":30.0},{"x":298.0,"y":1.0},{"x":313.0000000000001,"y":29.0},{"x":339.5925925925926,"y":27.0},{"x":372.49999999999983,"y":24.0},{"x":402.23684210526324,"y":38.0},{"x":430.6896551724138,"y":29.0},{"x":462.32352941176464,"y":34.0},{"x":487.72413793103453,"y":29.0}] + + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at 
org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[vector_decimal_3](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 7.242 s <<< FAILURE! +java.lang.AssertionError: +Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key" fname=vector_decimal_3.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at 
org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[vector_decimal_5](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 3.989 s <<< FAILURE! +java.lang.AssertionError: +Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key" fname=vector_decimal_5.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failed(QTestUtil.java:2262) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:176) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at 
org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[vector_null_projection](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 8.153 s <<< FAILURE! +java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing vector_null_projection.q +158,159c158,160 +< File Output Operator +< compressed: false +--- +> Select Operator +> expressions: null (type: void) +> outputColumnNames: _col0 +161,164c162,168 +< table: +< input format: org.apache.hadoop.mapred.SequenceFileInputFormat +< output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +< serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +--- +> File Output Operator +> compressed: false +> Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE +> table: +> input format: org.apache.hadoop.mapred.SequenceFileInputFormat +> output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +> serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at 
org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[ERROR] testCliDriver[vectorization_limit](org.apache.hadoop.hive.cli.TestCliDriver) Time elapsed: 13.771 s <<< FAILURE! +java.lang.AssertionError: +Client Execution succeeded but contained differences (error code = 1) after executing vectorization_limit.q +348c348 +< outputColumnNames: _col0 +--- +> outputColumnNames: ctinyint +354,363c354,371 +< Limit +< Number of rows: 20 +< Limit Vectorization: +< className: VectorLimitOperator +< native: true +< Statistics: Num rows: 20 Data size: 4300 Basic stats: COMPLETE Column stats: NONE +< File Output Operator +< compressed: false +< File Sink Vectorization: +< className: VectorFileSinkOperator +--- +> Group By Operator +> Group By Vectorization: +> className: VectorGroupByOperator +> groupByMode: HASH +> keyExpressions: col 0:tinyint +> native: false +> vectorProcessingMode: HASH +> projectedOutputColumnNums: [] +> keys: ctinyint (type: tinyint) +> mode: hash +> outputColumnNames: _col0 +> Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE +> Reduce Output Operator +> key expressions: _col0 (type: tinyint) +> sort order: + +> Map-reduce partition columns: _col0 (type: tinyint) +> Reduce Sink Vectorization: +> className: VectorReduceSinkOperator +365,369c373,376 +< Statistics: Num rows: 20 Data size: 4300 Basic stats: COMPLETE Column stats: NONE +< table: +< input format: org.apache.hadoop.mapred.SequenceFileInputFormat +< output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +< serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +--- +> nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, +Output was too long and had to be truncated... 
+ at org.junit.Assert.fail(Assert.java:88) + at org.apache.hadoop.hive.ql.QTestUtil.failedDiff(QTestUtil.java:2278) + at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:183) + at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) + at org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59) + at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:483) + at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) + at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) + at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) + at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) + at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) + at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.junit.runners.Suite.runChild(Suite.java:127) + at org.junit.runners.Suite.runChild(Suite.java:26) + at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) + at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) + at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) + at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) + at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) + at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) + at org.junit.rules.RunRules.evaluate(RunRules.java:20) + at org.junit.runners.ParentRunner.run(ParentRunner.java:309) + at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:369) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:275) + at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:239) + at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:160) + at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:373) + at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:334) + at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:119) + at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:407) + +[INFO] +[INFO] Results: +[INFO] +[ERROR] Failures: +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing acid_table_stats.q +98c98 +< totalSize 3978 +--- +> totalSize 3950 +136c136 +< Statistics: Num rows: 82 Data size: 39780 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 81 Data size: 39500 Basic stats: COMPLETE Column stats: NONE +138c138 +< Statistics: Num 
rows: 82 Data size: 39780 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 81 Data size: 39500 Basic stats: COMPLETE Column stats: NONE +213c213 +< totalSize 3978 +--- +> totalSize 3950 +264c264 +< totalSize 3978 +--- +> totalSize 3950 +389c389 +< totalSize 7958 +--- +> totalSize 7904 +436c436 +< totalSize 7958 +--- +> totalSize 7904 + +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing autoColumnStats_4.q +200c200 +< totalSize 1852 +--- +> totalSize 1798 +244c244 +< totalSize 3036 +--- +> totalSize 2909 + +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing auto_sortmerge_join_2.q +1101,1103d1100 +< Hive Runtime Error: Map local work exhausted memory +< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask +< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask + +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 1 running " +insert into p1 partition(p='A') values (2),(3)" fname=basicstat_partval.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 1 running " + +dfs -cp ${system:hive.root}data/files/grad.avsc ${system:test.tmp.dir}/" fname=compustat_avro.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing dbtxnmgr_showlocks.q +73c73 +< 5 default partitioned_acid_table p=abc MINOR initiated --- --- --- --- +--- +> 1 default partitioned_acid_table p=abc MINOR initiated --- --- --- --- + +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key" fname=decimal_3.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key" fname=decimal_5.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 
+[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing groupby_duplicate_key.q +8c8,9 +< Stage-0 is a root stage +--- +> Stage-1 is a root stage +> Stage-0 depends on stages: Stage-1 +10a12,50 +> Stage: Stage-1 +> Map Reduce +> Map Operator Tree: +> TableScan +> alias: src +> Row Limit Per Split: 10 +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Select Operator +> expressions: key (type: string) +> outputColumnNames: key +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Group By Operator +> keys: key (type: string), '' (type: string), '' (type: string) +> mode: hash +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Reduce Output Operator +> key expressions: _col0 (type: string), '' (type: string) +> sort order: ++ +> Map-reduce partition columns: _col0 (type: string), '' (type: string) +> Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE +> Reduce Operator Tree: +> Group By Operator +> keys: KEY._col0 (type: string), '' (type: string), '' (type: string) +> mode: mergepartial +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> Select Operator +> expressions: _col0 (type: string), '' (type: string), '' (type: string) +> outputColumnNames: _col0, _col1, _col2 +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> File Output Operator +> compressed: false +> Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE +> table: +> +Output was too long and had to be truncated... +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 10025 running " + +explain +SELECT distinct s1.customer_name as x, s1.customer_name as y +FROM default.testv1_staples s1 join default.src s2 on s1.customer_name = s2.key +HAVING ( +(SUM(s1.customer_balance) <= 4074689.000000041) +AND (AVG(s1.discount) <= 822) +AND (COUNT(s2.value) > 4) +)" fname=having2.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " + +FROM srcpart c +JOIN srcpart d +ON ( c.key=d.key AND c.ds='2008-04-08' AND d.ds='2008-04-08') +SELECT /*+ MAPJOIN(d) */ DISTINCT c.value as value order by value limit 10" fname=mapjoin_distinct.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 
+[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing mapjoin_hook.q +43,46c43 +< Hive Runtime Error: Map local work exhausted memory +< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask +< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask +< [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 1 BACKUP_COMMON_JOIN: 1 +--- +> [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 1 CONVERTED_MAPJOIN_LOCAL: 1 BACKUP_COMMON_JOIN: 0 +49c46 +< RUN: Stage-1:MAPRED +--- +> RUN: Stage-5:MAPRED +61,64c58 +< Hive Runtime Error: Map local work exhausted memory +< FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask +< ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask +< [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 0 CONVERTED_MAPJOIN_LOCAL: 2 BACKUP_COMMON_JOIN: 2 +--- +> [MapJoinCounter PostHook] COMMON_JOIN: 0 HINTED_MAPJOIN: 0 HINTED_MAPJOIN_LOCAL: 0 CONVERTED_MAPJOIN: 1 CONVERTED_MAPJOIN_LOCAL: 2 BACKUP_COMMON_JOIN: 1 +70c64 +< RUN: Stage-2:MAPRED +--- +> RUN: Stage-7:MAPRED + +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " +insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_all.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " +insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_buckets.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " +insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 1" fname=mm_conversions.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " +insert into table intermediate partition(p='455') select distinct key from src where key >= 0 order by key desc limit 2" fname=mm_cttas.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " +insert into table staging select distinct key, value from src order by key limit 2" fname=parquet_join.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 
+[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing row__id.q +65c65 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +69c69 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +73c73 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +78c78 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +81c81 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +107,109c107,109 +< 709 +< 710 +< 711 +--- +> 3 +> 4 +> 5 +126c126 +< Statistics: Num rows: 78 Data size: 19490 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 74 Data size: 18600 Basic stats: COMPLETE Column stats: NONE +129c129 +< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE +133c133 +< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE +136c136 +< Statistics: Num rows: 39 Data size: 9745 Basic stats: COMPLETE Column stats: NONE +--- +> Statistics: Num rows: 37 Data size: 9300 Basic stats: COMPLETE Column stats: NONE +161a162 +> 3 + +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing udaf_context_ngrams.q +34c34 +< [{"ngram":["was"],"estfrequency":3.0},{"ngram":["let"],"estfrequency":1.0},{"ngram":["would"],"estfrequency":1.0}] +--- +> 
[{"ngram":["was"],"estfrequency":17.0},{"ngram":["had"],"estfrequency":16.0},{"ngram":["thought"],"estfrequency":13.0},{"ngram":["could"],"estfrequency":9.0},{"ngram":["would"],"estfrequency":7.0},{"ngram":["lay"],"estfrequency":5.0},{"ngram":["did"],"estfrequency":4.0},{"ngram":["felt"],"estfrequency":4.0},{"ngram":["looked"],"estfrequency":4.0},{"ngram":["s"],"estfrequency":4.0},{"ngram":["wanted"],"estfrequency":4.0},{"ngram":["finally"],"estfrequency":3.0},{"ngram":["lifted"],"estfrequency":3.0},{"ngram":["must"],"estfrequency":3.0},{"ngram":["needed"],"estfrequency":3.0},{"ngram":["slid"],"estfrequency":3.0},{"ngram":["told"],"estfrequency":3.0},{"ngram":["tried"],"estfrequency":3.0},{"ngram":["also"],"estfrequency":2.0},{"ngram":["always"],"estfrequency":2.0},{"ngram":["began"],"estfrequency":2.0},{"ngram":["didn't"],"estfrequency":2.0},{"ngram":["do"],"estfrequency":2.0},{"ngram":["drew"],"estfrequency":2.0},{"ngram":["found"],"estfrequency":2.0},{"ngram":["is"],"estfrequency":2.0},{"ngram":["let"],"estfrequency":2.0},{"ngram":["made"],"estfrequency":2.0},{"ngram":["really"],"estfrequency":2.0},{"ngram":["reported"],"estfrequency":2.0},{"ngram":["threw"],"estfrequency":2.0},{"ngram":["touched"],"estfrequency":2.0},{"ngram":["wouldn't"],"estfrequency":2.0},{"ngram":["allowed"],"estfrequency":1.0},{"ngram":["almost"],"estfrequency":1.0},{"ngram":["became"],"estfrequency":1.0},{"ngram":["called"],"estfrequency":1.0},{"ngram":["caught"],"estfrequency":1.0},{"ngram":["chose"],"estfrequency":1.0},{"ngram":["confined"],"estfrequency":1.0},{"ngram":["cut"],"estfrequency":1.0},{"ngram":["denied"],"estfrequency":1.0},{"ngram":["directed"],"estfrequency":1.0},{"ngram":["discovered"],"estfrequency":1.0},{"ngram":["failed"],"estfrequency":1.0},{"ngram":["have"],"estfrequency":1.0},{"ngram":["heard"],"estfrequency":1.0},{"ngram":["hit"],"estfrequ +Output was too long and had to be truncated... 
+[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing udaf_corr.q +100c100 +< 0.6633880657639324 +--- +> 0.6633880657639326 + +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing udaf_histogram_numeric.q +27c27 +< [{"x":14.17857142857143,"y":28.0},{"x":41.588235294117645,"y":17.0},{"x":69.3,"y":20.0},{"x":93.50000000000001,"y":26.0},{"x":122.99999999999999,"y":24.0},{"x":144.44444444444446,"y":18.0},{"x":169.50000000000006,"y":34.0},{"x":197.51351351351352,"y":37.0},{"x":226.02857142857147,"y":35.0},{"x":254.15,"y":20.0},{"x":280.51724137931024,"y":29.0},{"x":309.47999999999996,"y":25.0},{"x":328.27777777777777,"y":18.0},{"x":348.4444444444444,"y":18.0},{"x":374.3809523809523,"y":21.0},{"x":401.9189189189191,"y":37.0},{"x":427.76,"y":25.0},{"x":453.47826086956513,"y":23.0},{"x":472.9615384615384,"y":26.0},{"x":491.89473684210526,"y":19.0}] +--- +> [{"x":9.761904761904763,"y":21.0},{"x":33.84210526315789,"y":19.0},{"x":62.75000000000001,"y":20.0},{"x":90.90322580645162,"y":31.0},{"x":122.91666666666667,"y":24.0},{"x":146.33333333333334,"y":21.0},{"x":170.70967741935485,"y":31.0},{"x":194.3571428571428,"y":28.0},{"x":214.84615384615384,"y":26.0},{"x":235.08695652173907,"y":23.0},{"x":257.80000000000007,"y":15.0},{"x":281.0333333333333,"y":30.0},{"x":298.0,"y":1.0},{"x":313.0000000000001,"y":29.0},{"x":339.5925925925926,"y":27.0},{"x":372.49999999999983,"y":24.0},{"x":402.23684210526324,"y":38.0},{"x":430.6896551724138,"y":29.0},{"x":462.32352941176464,"y":34.0},{"x":487.72413793103453,"y":29.0}] + +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key" fname=vector_decimal_3.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. +[ERROR] TestCliDriver.testCliDriver:59 Client execution failed with error code = 40000 running " + +SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key" fname=vector_decimal_5.q +See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs. 
+[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing vector_null_projection.q +158,159c158,160 +< File Output Operator +< compressed: false +--- +> Select Operator +> expressions: null (type: void) +> outputColumnNames: _col0 +161,164c162,168 +< table: +< input format: org.apache.hadoop.mapred.SequenceFileInputFormat +< output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +< serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +--- +> File Output Operator +> compressed: false +> Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE +> table: +> input format: org.apache.hadoop.mapred.SequenceFileInputFormat +> output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +> serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + +[ERROR] TestCliDriver.testCliDriver:59 Client Execution succeeded but contained differences (error code = 1) after executing vectorization_limit.q +348c348 +< outputColumnNames: _col0 +--- +> outputColumnNames: ctinyint +354,363c354,371 +< Limit +< Number of rows: 20 +< Limit Vectorization: +< className: VectorLimitOperator +< native: true +< Statistics: Num rows: 20 Data size: 4300 Basic stats: COMPLETE Column stats: NONE +< File Output Operator +< compressed: false +< File Sink Vectorization: +< className: VectorFileSinkOperator +--- +> Group By Operator +> Group By Vectorization: +> className: VectorGroupByOperator +> groupByMode: HASH +> keyExpressions: col 0:tinyint +> native: false +> vectorProcessingMode: HASH +> projectedOutputColumnNums: [] +> keys: ctinyint (type: tinyint) +> mode: hash +> outputColumnNames: _col0 +> Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE +> Reduce Output Operator +> key expressions: _col0 (type: tinyint) +> sort order: + +> Map-reduce partition columns: _col0 (type: tinyint) +> Reduce Sink Vectorization: +> className: VectorReduceSinkOperator +365,369c373,376 +< Statistics: Num rows: 20 Data size: 4300 Basic stats: COMPLETE Column stats: NONE +< table: +< input format: org.apache.hadoop.mapred.SequenceFileInputFormat +< output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat +< serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +--- +> nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, +Output was too long and had to be truncated... +[INFO] +[ERROR] Tests run: 2254, Failures: 25, Errors: 0, Skipped: 0 +[INFO] +[INFO] ------------------------------------------------------------------------ +[INFO] Reactor Summary: +[INFO] +[INFO] Hive Integration - Parent .......................... SUCCESS [ 1.097 s] +[INFO] Hive Integration - Custom Serde .................... SUCCESS [ 7.542 s] +[INFO] Hive Integration - Custom udfs ..................... SUCCESS [ 1.672 s] +[INFO] Hive Integration - Custom UDFs - udf-classloader-util SUCCESS [ 1.718 s] +[INFO] Hive Integration - Custom UDFs - udf-classloader-udf1 SUCCESS [ 1.459 s] +[INFO] Hive Integration - Custom UDFs - udf-classloader-udf2 SUCCESS [ 1.349 s] +[INFO] Hive Integration - Custom UDFs - udf-vectorized-badexample SUCCESS [ 1.423 s] +[INFO] Hive Integration - HCatalog Unit Tests ............. SUCCESS [ 8.837 s] +[INFO] Hive Integration - QFile Druid Tests ............... SUCCESS [ 3.515 s] +[INFO] Hive Integration - Testing Utilities ............... SUCCESS [ 12.740 s] +[INFO] Hive Integration - Unit Tests ...................... 
SUCCESS [ 14.387 s]
+[INFO] Hive Integration - Blobstore Tests ................. SUCCESS [ 7.907 s]
+[INFO] Hive Integration - Test Serde ...................... SUCCESS [ 1.030 s]
+[INFO] Hive Integration - QFile Tests ..................... FAILURE [ 06:33 h]
+[INFO] Hive Integration - QFile Accumulo Tests ............ SKIPPED
+[INFO] JMH benchmark: Hive ................................ SKIPPED
+[INFO] Hive Integration - Unit Tests - Hadoop 2 ........... SKIPPED
+[INFO] Hive Integration - Unit Tests with miniKdc ......... SKIPPED
+[INFO] Hive Integration - QFile Spark Tests ............... SKIPPED
+[INFO] ------------------------------------------------------------------------
+[INFO] BUILD FAILURE
+[INFO] ------------------------------------------------------------------------
+[INFO] Total time: 06:35 h
+[INFO] Finished at: 2018-02-05T20:08:28-08:00
+[INFO] Final Memory: 137M/697M
+[INFO] ------------------------------------------------------------------------
+[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.20.1:test (default-test) on project hive-it-qfile: There are test failures.
+[ERROR]
+[ERROR] Please refer to /Users/jhyde/open1/hive/itests/qtest/target/surefire-reports for the individual test results.
+[ERROR] Please refer to dump files (if any exist) [date]-jvmRun[N].dump, [date].dumpstream and [date]-jvmRun[N].dumpstream.
+[ERROR] -> [Help 1]
+[ERROR]
+[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
+[ERROR] Re-run Maven using the -X switch to enable full debug logging.
+[ERROR]
+[ERROR] For more information about the errors and possible solutions, please read the following articles:
+[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
+[ERROR]
+[ERROR] After correcting the problems, you can resume the build with the command
+[ERROR]   mvn <goals> -rf :hive-it-qfile
+HW10571:itests jhyde$
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 46d876d..267093c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -94,7 +94,7 @@
   INVALID_MAPINDEX_CONSTANT(10031, "Non-constant expression for map indexes not supported"),
   INVALID_MAPINDEX_TYPE(10032, "MAP key type does not match index expression type"),
   NON_COLLECTION_TYPE(10033, "[] not valid on non-collection types"),
-  SELECT_DISTINCT_WITH_GROUPBY(10034, "SELECT DISTINCT and GROUP BY can not be in the same query"),
+  @Deprecated SELECT_DISTINCT_WITH_GROUPBY(10034, "SELECT DISTINCT and GROUP BY can not be in the same query"),
   COLUMN_REPEATED_IN_PARTITIONING_COLS(10035, "Column repeated in partitioning columns"),
   DUPLICATE_COLUMN_NAMES(10036, "Duplicate column name:"),
   INVALID_BUCKET_NUMBER(10037, "Bucket number should be bigger than zero"),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 8e0a454..0dc6dbe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -3231,6 +3231,18 @@ private AggInfo getHiveAggInfo(ASTNode aggAst, int aggFnLstArgIndx, RowResolver
     return aInfo;
   }
 
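+  /**
+   * Rewrites a SELECT DISTINCT whose only select expression is "*" into an
+   * explicit DISTINCT column list taken from the input RowResolver; e.g.
+   * (illustrative) "select distinct * from src1" is treated as
+   * "select distinct key, value, ... from src1". Note that the RowResolver
+   * may still expose virtual columns such as BLOCK__OFFSET__INSIDE__FILE;
+   * see the note at the end of distinct_gby.q.
+   */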
+  private void expandSelectStar(QBParseInfo qbp, String dest, RowResolver inputRR, RelNode srcRel) {
+    final ASTNode selExprList = qbp.getSelForClause(dest);
+    if (selExprList.getToken().getType() == HiveParser.TOK_SELECTDI
+        && selExprList.getChildCount() == 1 && selExprList.getChild(0).getChildCount() == 1) {
+      ASTNode node = (ASTNode) selExprList.getChild(0).getChild(0);
+      if (node.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
+        ASTNode newSelExprList = genSelectDIAST(inputRR);
+        qbp.setSelExprForClause(dest, newSelExprList);
+      }
+    }
+  }
+
 /**
  * Generate GB plan.
  *
@@ -3255,16 +3267,18 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException
       // SEL%SEL% rule.
       ASTNode selExprList = qb.getParseInfo().getSelForClause(detsClauseName);
       SubQueryUtils.checkForTopLevelSubqueries(selExprList);
-      if (selExprList.getToken().getType() == HiveParser.TOK_SELECTDI
-          && selExprList.getChildCount() == 1 && selExprList.getChild(0).getChildCount() == 1) {
-        ASTNode node = (ASTNode) selExprList.getChild(0).getChild(0);
-        if (node.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
-          // As we said before, here we use genSelectLogicalPlan to rewrite AllColRef
-          srcRel = genSelectLogicalPlan(qb, srcRel, srcRel, null, null, true).getKey();
-          RowResolver rr = this.relToHiveRR.get(srcRel);
-          qbp.setSelExprForClause(detsClauseName, SemanticAnalyzer.genSelectDIAST(rr));
-        }
-      }
+      expandSelectStar(qbp, detsClauseName, relToHiveRR.get(srcRel), srcRel);
+
       // Select DISTINCT + windowing; GBy handled by genSelectForWindowing
       if (selExprList.getToken().getType() == HiveParser.TOK_SELECTDI &&
@@ -3392,8 +3406,41 @@ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException
         this.relToHiveRR.put(gbRel, groupByOutputRowResolver);
       }
-      return gbRel;
+      return gbRel;
+    }
+
+    private Pair<RelNode, RowResolver> genGBSelectDistinctPlan(
+        QBParseInfo qbp, String dest, Pair<RelNode, RowResolver> srcNodeRR) throws SemanticException {
+      final RelNode srcRel = srcNodeRR.left;
+
+      // This comes from genSelectLogicalPlan, must be a project
+      // assert srcRel instanceof HiveProject;
+
+      RowResolver inputRR = srcNodeRR.right;
+      if (inputRR == null) {
+        inputRR = relToHiveRR.get(srcRel);
+      }
+      final RowResolver outputRR = inputRR;
+
+      final List<Integer> groupSetPositions = Lists.newArrayList();
+      final RelDataType inputRT = srcRel.getRowType();
+      int idx = 0;
+
+      while (idx < inputRT.getFieldCount()) {
+        groupSetPositions.add(idx);
+        ++idx;
+      }
+
+      HiveAggregate distAgg = new HiveAggregate(
+          cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
+          srcRel,
+          ImmutableBitSet.of(groupSetPositions),
+          null, new ArrayList<AggregateCall>());
+      relToHiveRR.put(distAgg, outputRR);
+      relToHiveColNameCalcitePosMap.put(distAgg,
+          relToHiveColNameCalcitePosMap.get(srcRel));
+      return new Pair<RelNode, RowResolver>(distAgg, outputRR);
+    }
 
 /**
  * Generate OB RelNode and input Select RelNode that should be used to
@@ -3943,6 +3990,23 @@ private void setQueryHints(QB qb) throws SemanticException {
     }
   }
+
+  private Pair<RelNode, RowResolver> genSelectLogicalPlan(QB qb, RelNode srcRel, RelNode starSrcRel,
+      ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR, boolean isAllColRefRewrite)
+      throws SemanticException {
+    QBParseInfo qbp = getQBParseInfo(qb);
+    String selClauseName = qbp.getClauseNames().iterator().next();
+    ASTNode selExprList = qbp.getSelForClause(selClauseName);
+
+    Pair<RelNode, RowResolver> retNodeRR = internalGenSelectLogicalPlan(
+        qb, srcRel, starSrcRel, outerNameToPosMap, outerRR, isAllColRefRewrite);
+
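+    // A DISTINCT select is planned as an extra Aggregate that groups on
+    // every projected column and computes no aggregate calls (see
+    // genGBSelectDistinctPlan above); e.g. (illustrative)
+    // "select distinct key from src1" yields a Group By Operator on "key"
+    // with no aggregations, as in distinct_gby.q.out below.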
+    if (selExprList.getType() == HiveParser.TOK_SELECTDI) {
+      retNodeRR = genGBSelectDistinctPlan(qbp, selClauseName, retNodeRR);
+    }
+    return retNodeRR;
+  }
+
 /**
  * NOTE: there can only be one select caluse since we don't handle multi
  * destination insert.
@@ -3961,7 +4025,7 @@
  * @return RelNode: the select relnode RowResolver: i.e., originalRR, the RR after select when there is an order by.
  * @throws SemanticException
  */
-  private Pair<RelNode, RowResolver> genSelectLogicalPlan(QB qb, RelNode srcRel, RelNode starSrcRel,
+  private Pair<RelNode, RowResolver> internalGenSelectLogicalPlan(QB qb, RelNode srcRel, RelNode starSrcRel,
       ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR, boolean isAllColRefRewrite)
       throws SemanticException {
     // 0. Generate a Select Node for Windowing
@@ -4234,7 +4298,13 @@
     // TODO: support unselected columns in genericUDTF and windowing functions.
     // We examine the order by in this query block and adds in column needed
     // by order by in select list.
-    if (obAST != null && !(selForWindow != null && selExprList.getToken().getType() == HiveParser.TOK_SELECTDI) && !isAllColRefRewrite) {
+    //
+    // If DISTINCT is present, it is not possible to ORDER BY unselected
+    // columns, and in fact adding all columns would change the behavior of
+    // DISTINCT, so we bypass this logic.
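+    // For example (illustrative): "select distinct c1 from t order by c2"
+    // cannot be satisfied, and rewriting "select distinct c1 from t order
+    // by c1" to additionally project c2 would widen the DISTINCT key to
+    // (c1, c2) and could return more rows than the original query.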
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 5c96653..a6b420f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -1497,10 +1497,10 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan
       if (qbp.getJoinExpr() != null) {
         queryProperties.setHasJoinFollowedByGroupBy(true);
       }
-      if (qbp.getSelForClause(ctx_1.dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
-        throw new SemanticException(generateErrorMessage(ast,
-            ErrorMsg.SELECT_DISTINCT_WITH_GROUPBY.getMsg()));
-      }
       qbp.setGroupByExprForClause(ctx_1.dest, ast);
       skipRecursion = true;
@@ -3925,40 +3925,47 @@ public static long unsetBit(long bitmap, int bitIdx) {
   }

   /**
-   * This function is a wrapper of parseInfo.getGroupByForClause which
-   * automatically translates SELECT DISTINCT a,b,c to SELECT a,b,c GROUP BY
-   * a,b,c.
+   * This function returns the GBY, if present.
+   * DISTINCT, if present, will be handled when the SELECT is generated.
    */
   List<ASTNode> getGroupByForClause(QBParseInfo parseInfo, String dest) throws SemanticException {
-    if (parseInfo.getSelForClause(dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
-      ASTNode selectExprs = parseInfo.getSelForClause(dest);
-      List<ASTNode> result = new ArrayList<ASTNode>(selectExprs == null ? 0
-          : selectExprs.getChildCount());
-      if (selectExprs != null) {
-        for (int i = 0; i < selectExprs.getChildCount(); ++i) {
-          if (((ASTNode) selectExprs.getChild(i)).getToken().getType() == HiveParser.QUERY_HINT) {
-            continue;
+    List<ASTNode> result;
+    // When *not* invoked by CalcitePlanner, return the DISTINCT as a GBY;
+    // CBO handles the DISTINCT in CalcitePlannerAction.genSelectLogicalPlan.
+    if (!(this instanceof CalcitePlanner)) {
+      if (parseInfo.getSelForClause(dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
+        ASTNode selectExprs = parseInfo.getSelForClause(dest);
+        result = new ArrayList<ASTNode>(selectExprs == null ? 0
+            : selectExprs.getChildCount());
+        if (selectExprs != null) {
+          for (int i = 0; i < selectExprs.getChildCount(); ++i) {
+            if (((ASTNode) selectExprs.getChild(i)).getToken().getType() == HiveParser.QUERY_HINT) {
+              continue;
+            }
+            // table.column AS alias
+            ASTNode grpbyExpr = (ASTNode) selectExprs.getChild(i).getChild(0);
+            result.add(grpbyExpr);
           }
-          // table.column AS alias
-          ASTNode grpbyExpr = (ASTNode) selectExprs.getChild(i).getChild(0);
-          result.add(grpbyExpr);
         }
+        return result;
       }
-      return result;
-    } else {
-      ASTNode grpByExprs = parseInfo.getGroupByForClause(dest);
-      List<ASTNode> result = new ArrayList<ASTNode>(grpByExprs == null ? 0
-          : grpByExprs.getChildCount());
-      if (grpByExprs != null) {
-        for (int i = 0; i < grpByExprs.getChildCount(); ++i) {
-          ASTNode grpbyExpr = (ASTNode) grpByExprs.getChild(i);
-          if (grpbyExpr.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
-            result.add(grpbyExpr);
-          }
+    }
+
+    // Look for a true GBY.
+    ASTNode grpByExprs = parseInfo.getGroupByForClause(dest);
+    if (grpByExprs != null) {
+      result = new ArrayList<ASTNode>(grpByExprs.getChildCount());
+      for (int i = 0; i < grpByExprs.getChildCount(); ++i) {
+        ASTNode grpbyExpr = (ASTNode) grpByExprs.getChild(i);
+        if (grpbyExpr.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
+          result.add(grpbyExpr);
         }
       }
-      return result;
+    } else {
+      result = new ArrayList<ASTNode>(0);
     }
+    return result;
   }

   static String[] getColAlias(ASTNode selExpr, String defaultName,
diff --git a/ql/src/test/queries/clientpositive/distinct_gby.q b/ql/src/test/queries/clientpositive/distinct_gby.q
new file mode 100644
index 0000000..182ce83
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/distinct_gby.q
@@ -0,0 +1,36 @@
+explain select distinct key from src1;
+
+explain select distinct * from src1;
+
+explain select distinct count(*) from src1 where key in (1,2,3);
+
+explain select distinct count(*) from src1 where key in (1,2,3) group by key;
+
+explain select distinct key, count(*) from src1 where key in (1,2,3) group by key;
+
+explain select distinct * from (select * from src1) as T;
+
+explain select distinct * from (select count(*) from src1) as T;
+
+explain select distinct * from (select * from src1 where key in (1,2,3)) as T;
+
+explain select distinct * from (select count(*) from src1 where key in (1,2,3)) as T;
+
+explain select distinct * from (select distinct count(*) from src1 where key in (1,2,3)) as T;
+
+explain select distinct sum(value) over () from src1;
+
+explain select distinct * from (select sum(value) over () from src1) as T;
+
+explain select distinct count(*)+1 from src1;
+
+explain select distinct count(*)+key from src1 group by key;
+
+explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key;
+
+explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key group by a.key;
+
+-- should not project the virtual BLOCK_OFFSET et al.
columns +explain select distinct * from (select distinct * from src1) as T; + + diff --git a/ql/src/test/queries/clientpositive/having2.q b/ql/src/test/queries/clientpositive/having2.q index 7b35365..be3cc0e 100644 --- a/ql/src/test/queries/clientpositive/having2.q +++ b/ql/src/test/queries/clientpositive/having2.q @@ -84,7 +84,7 @@ AND (COUNT(s2.value) > 4) ); explain -SELECT distinct s1.customer_name as x, s1.customer_name as y +SELECT distinct COUNT(*) AS c FROM default.testv1_staples s1 join default.src s2 on s1.customer_name = s2.key HAVING ( (SUM(s1.customer_balance) <= 4074689.000000041) diff --git a/ql/src/test/queries/negative/wrong_distinct1.q b/ql/src/test/queries/negative/wrong_distinct1.q deleted file mode 100755 index d92c3bb..0000000 --- a/ql/src/test/queries/negative/wrong_distinct1.q +++ /dev/null @@ -1,2 +0,0 @@ -FROM src -INSERT OVERWRITE TABLE dest1 SELECT DISTINCT src.key, substr(src.value,4,1) GROUP BY src.key diff --git a/ql/src/test/results/clientpositive/distinct_gby.q.out b/ql/src/test/results/clientpositive/distinct_gby.q.out new file mode 100644 index 0000000..cf28bf2 --- /dev/null +++ b/ql/src/test/results/clientpositive/distinct_gby.q.out @@ -0,0 +1,1078 @@ +PREHOOK: query: explain select distinct key from src1 +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct key from src1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from src1 +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from src1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: key (type: string), value (type: string), BLOCK__OFFSET__INSIDE__FILE (type: bigint), INPUT__FILE__NAME (type: string), ROW__ID (type: struct) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key 
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint), _col3 (type: string), _col4 (type: struct) + sort order: +++++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint), _col3 (type: string), _col4 (type: struct) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: bigint), KEY._col3 (type: string), KEY._col4 (type: struct) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(*) from src1 where key in (1,2,3) +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(*) from src1 where key in (1,2,3) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(*) from src1 where key in (1,2,3) group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(*) from src1 where key in (1,2,3) group by key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: key 
(type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint) + outputColumnNames: _col1 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col1 (type: bigint) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: bigint) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct key, count(*) from src1 where key in (1,2,3) group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct key, count(*) from src1 where key in (1,2,3) group by key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: 
false + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select * from src1) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select * from src1) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: key, value + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: key (type: string), value (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string), KEY._col1 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select count(*) from src1) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select count(*) from src1) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select * from src1 where key in (1,2,3)) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select * from src1 where key in (1,2,3)) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: key (type: string), value (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string) + sort order: ++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string) + Statistics: Num rows: 12 Data 
size: 91 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string), KEY._col1 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select count(*) from src1 where key in (1,2,3)) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select count(*) from src1 where key in (1,2,3)) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select distinct count(*) from src1 where key in (1,2,3)) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select distinct count(*) from src1 where key in (1,2,3)) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (UDFToDouble(key)) IN (1.0, 2.0, 3.0) (type: boolean) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Select Operator + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce 
Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct sum(value) over () from src1 +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct sum(value) over () from src1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: 0 (type: int) + sort order: + + Map-reduce partition columns: 0 (type: int) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Reduce Operator Tree: + Select Operator + expressions: VALUE._col1 (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + PTF Operator + Function definitions: + Input definition + input alias: ptf_0 + output shape: _col1: string + type: WINDOWING + Windowing table definition + input alias: ptf_1 + name: windowingtablefunction + order by: 0 ASC NULLS FIRST + partition by: 0 + raw input shape: + window functions: + window function definition + alias: sum_window_0 + arguments: _col1 + name: sum + window function: GenericUDAFSumDouble + window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: sum_window_0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: double) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Map-reduce partition columns: _col0 (type: double) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: double) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 
+ Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select sum(value) over () from src1) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select sum(value) over () from src1) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: 0 (type: int) + sort order: + + Map-reduce partition columns: 0 (type: int) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Reduce Operator Tree: + Select Operator + expressions: VALUE._col1 (type: string) + outputColumnNames: _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + PTF Operator + Function definitions: + Input definition + input alias: ptf_0 + output shape: _col1: string + type: WINDOWING + Windowing table definition + input alias: ptf_1 + name: windowingtablefunction + order by: 0 ASC NULLS FIRST + partition by: 0 + raw input shape: + window functions: + window function definition + alias: sum_window_0 + arguments: _col1 + name: sum + window function: GenericUDAFSumDouble + window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: sum_window_0 (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: double) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Map-reduce partition columns: _col0 (type: double) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: double) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(*)+1 from src1 +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(*)+1 from src1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 25 Data size: 191 Basic 
stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: (_col0 + 1) (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(*)+key from src1 group by key +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(*)+key from src1 group by key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: (UDFToDouble(_col1) + UDFToDouble(_col0)) (type: double) + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: double) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: double) + sort order: + + Map-reduce partition columns: _col0 (type: double) + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: double) + mode: mergepartial + 
outputColumnNames: _col0 + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + TableScan + alias: b + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col1, _col3 + Statistics: Num rows: 27 Data size: 210 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(_col1), count(_col3) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0), count(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column 
stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key group by a.key +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct count(a.value), count(b.value) from src1 a join src1 b on a.key=b.key group by a.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-3 depends on stages: Stage-2 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + TableScan + alias: b + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col3 + Statistics: Num rows: 27 Data size: 210 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count(_col1), count(_col3) + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 27 Data size: 210 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 27 Data size: 210 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0), count(VALUE._col1) + keys: 
KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 13 Data size: 101 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col1, _col2 + Statistics: Num rows: 13 Data size: 101 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col1 (type: bigint), _col2 (type: bigint) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 101 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: bigint), _col1 (type: bigint) + sort order: ++ + Map-reduce partition columns: _col0 (type: bigint), _col1 (type: bigint) + Statistics: Num rows: 13 Data size: 101 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: bigint), KEY._col1 (type: bigint) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 6 Data size: 46 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 46 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain select distinct * from (select distinct * from src1) as T +PREHOOK: type: QUERY +POSTHOOK: query: explain select distinct * from (select distinct * from src1) as T +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: src1 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: key (type: string), value (type: string), BLOCK__OFFSET__INSIDE__FILE (type: bigint), INPUT__FILE__NAME (type: string), ROW__ID (type: struct) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint), _col3 (type: string), _col4 (type: struct) + sort order: +++++ + Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint), _col3 (type: string), _col4 (type: struct) + Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: bigint), KEY._col3 (type: string), KEY._col4 (type: struct) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 12 Data size: 91 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: string), _col1 (type: string), _col2 (type: bigint), _col3 (type: string), _col4 (type: struct) + mode: complete + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: 
Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + diff --git a/ql/src/test/results/clientpositive/having2.q.out b/ql/src/test/results/clientpositive/having2.q.out index 67f8af8..745102d 100644 --- a/ql/src/test/results/clientpositive/having2.q.out +++ b/ql/src/test/results/clientpositive/having2.q.out @@ -485,7 +485,7 @@ STAGE PLANS: ListSink PREHOOK: query: explain -SELECT distinct s1.customer_name as x, s1.customer_name as y +SELECT distinct COUNT(*) AS c FROM default.testv1_staples s1 join default.src s2 on s1.customer_name = s2.key HAVING ( (SUM(s1.customer_balance) <= 4074689.000000041) @@ -494,7 +494,7 @@ AND (COUNT(s2.value) > 4) ) PREHOOK: type: QUERY POSTHOOK: query: explain -SELECT distinct s1.customer_name as x, s1.customer_name as y +SELECT distinct COUNT(*) AS c FROM default.testv1_staples s1 join default.src s2 on s1.customer_name = s2.key HAVING ( (SUM(s1.customer_balance) <= 4074689.000000041) @@ -550,14 +550,13 @@ STAGE PLANS: keys: 0 _col1 (type: string) 1 _col0 (type: string) - outputColumnNames: _col0, _col1, _col2, _col4 + outputColumnNames: _col0, _col2, _col4 Statistics: Num rows: 550 Data size: 5843 Basic stats: PARTIAL Column stats: NONE Group By Operator - aggregations: sum(_col2), avg(_col0), count(_col4) - keys: _col1 (type: string) + aggregations: count(), sum(_col2), avg(_col0), count(_col4) mode: hash outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: PARTIAL Column stats: NONE + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE File Output Operator compressed: false table: @@ -570,36 +569,29 @@ STAGE PLANS: Map Operator Tree: TableScan Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 550 Data size: 5843 Basic stats: PARTIAL Column stats: NONE - value expressions: _col1 (type: double), _col2 (type: struct), _col3 (type: bigint) + sort order: + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: double), _col2 (type: struct), _col3 (type: bigint) Reduce Operator Tree: Group By Operator - aggregations: sum(VALUE._col0), avg(VALUE._col1), count(VALUE._col2) - keys: KEY._col0 (type: string) + aggregations: count(VALUE._col0), sum(VALUE._col1), avg(VALUE._col2), count(VALUE._col3) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 275 Data size: 2921 Basic stats: PARTIAL Column stats: NONE - Select Operator - expressions: _col0 (type: string), _col1 (type: double), _col2 (type: double), _col3 (type: bigint) - outputColumnNames: _col1, _col2, _col3, _col4 - Statistics: Num rows: 275 Data size: 2921 Basic stats: PARTIAL Column stats: NONE - Filter Operator - predicate: ((_col2 <= 4074689.000000041) and (_col3 <= 822.0) and (_col4 > 4)) (type: boolean) - Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL Column stats: NONE - Select Operator - expressions: _col1 (type: string), _col1 (type: string) - outputColumnNames: _col0, _col1 - 
Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: ((_col1 <= 4074689.000000041) and (_col2 <= 822.0) and (_col3 > 4)) (type: boolean) + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: _col0 (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 104 Basic stats: PARTIAL Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -607,3 +599,171 @@ STAGE PLANS: Processor Tree: ListSink +PREHOOK: query: DROP TABLE IF EXISTS src +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src +PREHOOK: Output: default@src +POSTHOOK: query: DROP TABLE IF EXISTS src +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src +POSTHOOK: Output: default@src +PREHOOK: query: DROP TABLE IF EXISTS src1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src1 +PREHOOK: Output: default@src1 +POSTHOOK: query: DROP TABLE IF EXISTS src1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src1 +POSTHOOK: Output: default@src1 +PREHOOK: query: DROP TABLE IF EXISTS src_json +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src_json +PREHOOK: Output: default@src_json +POSTHOOK: query: DROP TABLE IF EXISTS src_json +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src_json +POSTHOOK: Output: default@src_json +PREHOOK: query: DROP TABLE IF EXISTS src_sequencefile +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src_sequencefile +PREHOOK: Output: default@src_sequencefile +POSTHOOK: query: DROP TABLE IF EXISTS src_sequencefile +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src_sequencefile +POSTHOOK: Output: default@src_sequencefile +PREHOOK: query: DROP TABLE IF EXISTS src_thrift +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src_thrift +PREHOOK: Output: default@src_thrift +POSTHOOK: query: DROP TABLE IF EXISTS src_thrift +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src_thrift +POSTHOOK: Output: default@src_thrift +PREHOOK: query: DROP TABLE IF EXISTS srcbucket +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@srcbucket +PREHOOK: Output: default@srcbucket +POSTHOOK: query: DROP TABLE IF EXISTS srcbucket +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@srcbucket +POSTHOOK: Output: default@srcbucket +PREHOOK: query: DROP TABLE IF EXISTS srcbucket2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@srcbucket2 +PREHOOK: Output: default@srcbucket2 +POSTHOOK: query: DROP TABLE IF EXISTS srcbucket2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@srcbucket2 +POSTHOOK: Output: default@srcbucket2 +PREHOOK: query: DROP TABLE IF EXISTS srcpart +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@srcpart +PREHOOK: Output: default@srcpart +POSTHOOK: query: DROP TABLE IF EXISTS srcpart +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: 
default@srcpart +POSTHOOK: Output: default@srcpart +PREHOOK: query: DROP TABLE IF EXISTS primitives +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS primitives +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest4 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest4 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest4_sequencefile +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest4_sequencefile +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest_j1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest_j1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest_g1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest_g1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS dest_g2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS dest_g2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS fetchtask_ioexception +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE IF EXISTS fetchtask_ioexception +POSTHOOK: type: DROPTABLE +PREHOOK: query: DROP TABLE IF EXISTS alltypesorc +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@alltypesorc +POSTHOOK: query: DROP TABLE IF EXISTS alltypesorc +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@alltypesorc +PREHOOK: query: DROP TABLE IF EXISTS alltypesparquet +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@alltypesparquet +PREHOOK: Output: default@alltypesparquet +POSTHOOK: query: DROP TABLE IF EXISTS alltypesparquet +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@alltypesparquet +POSTHOOK: Output: default@alltypesparquet +PREHOOK: query: DROP TABLE IF EXISTS cbo_t1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@cbo_t1 +PREHOOK: Output: default@cbo_t1 +POSTHOOK: query: DROP TABLE IF EXISTS cbo_t1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@cbo_t1 +POSTHOOK: Output: default@cbo_t1 +PREHOOK: query: DROP TABLE IF EXISTS cbo_t2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@cbo_t2 +PREHOOK: Output: default@cbo_t2 +POSTHOOK: query: DROP TABLE IF EXISTS cbo_t2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@cbo_t2 +POSTHOOK: Output: default@cbo_t2 +PREHOOK: query: DROP TABLE IF EXISTS cbo_t3 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@cbo_t3 +PREHOOK: Output: default@cbo_t3 +POSTHOOK: query: DROP TABLE IF EXISTS cbo_t3 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@cbo_t3 +POSTHOOK: Output: default@cbo_t3 +PREHOOK: query: DROP TABLE IF EXISTS src_cbo +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@src_cbo +PREHOOK: Output: default@src_cbo +POSTHOOK: query: DROP TABLE IF EXISTS src_cbo +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@src_cbo +POSTHOOK: Output: default@src_cbo +PREHOOK: query: DROP TABLE IF EXISTS part +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@part +PREHOOK: Output: default@part +POSTHOOK: query: DROP TABLE IF EXISTS part +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@part +POSTHOOK: Output: 
default@part +PREHOOK: query: DROP TABLE IF EXISTS lineitem +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@lineitem +PREHOOK: Output: default@lineitem +POSTHOOK: query: DROP TABLE IF EXISTS lineitem +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@lineitem +POSTHOOK: Output: default@lineitem diff --git a/ql/src/test/results/compiler/errors/wrong_distinct1.q.out b/ql/src/test/results/compiler/errors/wrong_distinct1.q.out deleted file mode 100644 index 11f48a2..0000000 --- a/ql/src/test/results/compiler/errors/wrong_distinct1.q.out +++ /dev/null @@ -1,2 +0,0 @@ -Semantic Exception: -2:88 SELECT DISTINCT and GROUP BY can not be in the same query. Error encountered near token 'key' \ No newline at end of file
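The deleted wrong_distinct1 test asserted the old rule that SELECT DISTINCT and GROUP BY cannot be combined; with that rule removed, the expected error no longer exists. As the distinct_gby.q.out plans above show, such a query now evaluates the GROUP BY first and deduplicates its output. A rough SQL equivalence (a sketch, reusing src1 from the new test):

select distinct count(*) from src1 group by key;
-- behaves like the nested form
select distinct c from (select count(*) as c from src1 group by key) t;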