diff --git .gitignore .gitignore index 4d341a0..47c59da 100644 --- .gitignore +++ .gitignore @@ -27,4 +27,5 @@ hcatalog/webhcat/java-client/target hcatalog/storage-handlers/hbase/target hcatalog/webhcat/svr/target conf/hive-default.xml.template +itests/hive-blobstore/src/test/resources/blobstore-conf.xml .DS_Store diff --git data/conf/blobstore/hive-site.xml data/conf/blobstore/hive-site.xml new file mode 100644 index 0000000..4702a93 --- /dev/null +++ data/conf/blobstore/hive-site.xml @@ -0,0 +1,262 @@ + + + + + + + hive.in.test + true + Internal marker for test. Used for masking env-dependent values + + + + + + + + + + + hadoop.tmp.dir + ${test.tmp.dir}/hadoop-tmp + A base for other temporary directories. + + + + + + hive.exec.scratchdir + ${test.tmp.dir}/scratchdir + Scratch space for Hive jobs + + + + hive.exec.local.scratchdir + ${test.tmp.dir}/localscratchdir/ + Local scratch space for Hive jobs + + + + local.tmp.dir + file:///${env:PWD}/target/tmp + + + + datanucleus.schema.autoCreateAll + true + + + + hive.metastore.schema.verification + false + + + + javax.jdo.option.ConnectionURL + jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true + + + + hive.stats.dbconnectionstring + jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true + + + + javax.jdo.option.ConnectionDriverName + org.apache.derby.jdbc.EmbeddedDriver + + + + javax.jdo.option.ConnectionUserName + APP + + + + javax.jdo.option.ConnectionPassword + mine + + + + + hive.metastore.warehouse.dir + ${test.warehouse.dir} + + + + + hive.metastore.metadb.dir + file://${test.tmp.dir}/metadb/ + + Required by metastore server or if the uris argument below is not supplied + + + + + test.log.dir + ${test.tmp.dir}/log/ + + + + + test.data.files + ${hive.root}/data/files + + + + + test.data.scripts + ${basedir}/src/test/data/scripts + + + + + hive.jar.path + ${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar + + + + + hive.metastore.rawstore.impl + org.apache.hadoop.hive.metastore.ObjectStore + Name of the class that implements org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieval of raw metadata objects such as table, database + + + + hive.querylog.location + ${test.tmp.dir}/tmp + Location of the structured hive logs + + + + hive.exec.pre.hooks + org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables + Pre Execute Hook for Tests + + + + hive.exec.post.hooks + org.apache.hadoop.hive.ql.hooks.PostExecutePrinter + Post Execute Hook for Tests + + + + hive.support.concurrency + true + Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks. 
+ + + + hive.unlock.numretries + 2 + The number of times you want to retry to do one unlock + + + + hive.lock.sleep.between.retries + 2 + The sleep time (in seconds) between various retries + + + + fs.pfile.impl + org.apache.hadoop.fs.ProxyLocalFileSystem + A proxy for local file system used for cross file system testing + + + + hive.exec.mode.local.auto + false + + Let hive determine whether to run in local mode automatically + Disabling this for tests so that minimr is not affected + + + + + hive.auto.convert.join + false + Whether Hive enable the optimization about converting common join into mapjoin based on the input file size + + + + hive.ignore.mapjoin.hint + false + Whether Hive ignores the mapjoin hint + + + + hive.input.format + org.apache.hadoop.hive.ql.io.CombineHiveInputFormat + The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat. + + + + hive.default.rcfile.serde + org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + The default SerDe hive will use for the rcfile format + + + + hive.stats.key.prefix.reserve.length + 0 + + + + hive.conf.restricted.list + dummy.config.value + Using dummy config value above because you cannot override config with empty value + + + + hive.exec.submit.local.task.via.child + false + + + + hive.optimize.sort.dynamic.partition + true + + + + hive.dummyparam.test.server.specific.config.override + from.hive-site.xml + Using dummy param to test server specific configuration + + + + hive.dummyparam.test.server.specific.config.hivesite + from.hive-site.xml + Using dummy param to test server specific configuration + + + + hive.fetch.task.conversion + minimal + + + + hive.users.in.admin.role + hive_admin_user + + diff --git itests/hive-blobstore/README itests/hive-blobstore/README new file mode 100644 index 0000000..b362836 --- /dev/null +++ itests/hive-blobstore/README @@ -0,0 +1,19 @@ +The folder structure details are: + + * ./src/test/queries - contains the queries to be tested on s3 + * ./src/test/results - contains the expected hive console output for the queries + * ./target/qfile-results - Hive console output goes here + * ../../data/conf/blobstore/ - contains hive-site.xml + +To run blobstore integration tests: + + 1. Create blobstore-conf.xml in ./src/test/resources/ with the blobstore credentials (see blobstore-conf.xml.template). + + 2. Run following command: + mvn test -Dtest=TestBlobstore[Negative]CliDriverS3 -Dtest.blobstore.path=s3://my-bucket/hive/s3/it/prefix + +Unique source and output paths are created from the test.blobstore.path argument to prevent issues during parallel test execution. + +Blobstore source tables (tables created initially used to populate output tables) can be created on the path ${hiveconf:test.blobstore.sources} and should be updated in ./src/test/data/scripts/blobstore_test_init.q. 
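For reference, a minimal blobstore-conf.xml might look like the sketch below. The property names are taken from blobstore-conf.xml.template in this patch; the values are placeholders for your own S3A credentials and should never be committed (the file is covered by the .gitignore entry added above).

<?xml version="1.0"?>
<configuration>
  <!-- Placeholder credentials for illustration only; substitute real values locally. -->
  <property>
    <name>fs.s3a.access.key</name>
    <value>YOUR_AWS_ACCESS_KEY_ID</value>
  </property>
  <property>
    <name>fs.s3a.secret.key</name>
    <value>YOUR_AWS_SECRET_ACCESS_KEY</value>
  </property>
</configuration>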
+ +Blobstore output tables can be created on the path ${hiveconf:test.blobstore.output} and should be created and deleted between tests (see ./src/test/queries/clientpositive/insert_into.q) \ No newline at end of file diff --git itests/hive-blobstore/pom.xml itests/hive-blobstore/pom.xml new file mode 100644 index 0000000..4a2f318 --- /dev/null +++ itests/hive-blobstore/pom.xml @@ -0,0 +1,357 @@ + + + + 4.0.0 + + + org.apache.hive + hive-it + 2.2.0-SNAPSHOT + ../pom.xml + + + hive-blobstore + jar + Hive Integration - Blobstore Tests + + + ../.. + + + + false + + false + ${hadoop.version} + -mkdir -p + + + + + tests-off + + + src/test/resources/blobstore-conf.xml + + + + true + + + + tests-on + + + src/test/resources/blobstore-conf.xml + + + + false + + + + + + + + org.apache.hive + hive-ant + ${project.version} + test + + + org.apache.hive + hive-common + ${project.version} + test + + + org.apache.hive + hive-contrib + ${project.version} + test + + + org.apache.hive + hive-metastore + ${project.version} + test + + + org.apache.hive + hive-metastore + ${project.version} + tests + test + + + org.apache.hive + hive-it-unit + ${project.version} + tests + test + + + org.apache.hive + hive-serde + ${project.version} + test + + + org.apache.hive + hive-exec + ${project.version} + test + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + tests + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop.version} + tests + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + org.apache.hadoop + hadoop-mapreduce-client-hs + ${hadoop.version} + test + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + org.apache.tez + tez-tests + ${tez.version} + test-jar + + + org.apache.tez + tez-api + ${tez.version} + test + + + org.apache.tez + tez-runtime-library + ${tez.version} + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + org.apache.tez + tez-mapreduce + ${tez.version} + test + + + org.apache.tez + tez-dag + ${tez.version} + test + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + junit + junit + ${junit.version} + test + + + org.apache.hadoop + hadoop-aws + ${hadoop.version} + compile + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.new.version} + + + com.fasterxml.jackson.core + jackson-core + ${jackson.new.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.new.version} + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + blobstore_src + ${test.blobstore.path} + + + + + org.codehaus.mojo + properties-maven-plugin + 1.0-alpha-2 + + + initialize + + read-project-properties + + + + ${basedir}/../src/test/resources/testconfiguration.properties + + + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + generate-tests-sources + generate-test-sources + + + + + + + + + + + run + + + + + + org.codehaus.mojo + build-helper-maven-plugin + ${maven.build-helper.plugin.version} + + + add-test-sources + generate-test-sources + + add-test-source + + + + target/generated-test-sources/java + + + + + + + + + diff --git 
itests/hive-blobstore/src/test/data/scripts/blobstore_test_cleanup.q itests/hive-blobstore/src/test/data/scripts/blobstore_test_cleanup.q new file mode 100644 index 0000000..02329ab --- /dev/null +++ itests/hive-blobstore/src/test/data/scripts/blobstore_test_cleanup.q @@ -0,0 +1 @@ +DROP TABLE blobstore_src purge; diff --git itests/hive-blobstore/src/test/data/scripts/blobstore_test_init.q itests/hive-blobstore/src/test/data/scripts/blobstore_test_init.q new file mode 100644 index 0000000..2036bae --- /dev/null +++ itests/hive-blobstore/src/test/data/scripts/blobstore_test_init.q @@ -0,0 +1,14 @@ +dfs -mkdir -p ${hiveconf:test.blobstore.sources}; +set hive.stats.dbclass=fs; +-- +-- Table blobstore_src +-- +DROP TABLE IF EXISTS blobstore_src; +CREATE TABLE blobstore_src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE +LOCATION '${hiveconf:test.blobstore.sources}/blobstore_src'; +LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE blobstore_src; +ANALYZE TABLE blobstore_src COMPUTE STATISTICS; +ANALYZE TABLE blobstore_src COMPUTE STATISTICS FOR COLUMNS key,value; + +reset; +set hive.stats.dbclass=fs; diff --git itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreCliDriver.java itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreCliDriver.java new file mode 100644 index 0000000..d54198f --- /dev/null +++ itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreCliDriver.java @@ -0,0 +1,45 @@ +package org.apache.hadoop.hive.cli; + +import java.io.File; +import java.util.List; + +import org.apache.hadoop.hive.cli.control.CliAdapter; +import org.apache.hadoop.hive.cli.control.CliConfigs; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class TestBlobstoreCliDriver { + + static CliAdapter adapter = new CliConfigs.BlobstoreCliConfig().getCliAdapter(); + + @Parameters(name = "{0}") + public static List getParameters() throws Exception { + return adapter.getParameters(); + } + + @ClassRule + public static TestRule cliClassRule = adapter.buildClassRule(); + + @Rule + public TestRule cliTestRule = adapter.buildTestRule(); + + private String name; + private File qfile; + + public TestBlobstoreCliDriver(String name, File qfile) { + this.name = name; + this.qfile = qfile; + } + + @Test + public void testCliDriver() throws Exception { + adapter.runTest(name, qfile); + } + +} diff --git itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreNegativeCliDriver.java itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreNegativeCliDriver.java new file mode 100644 index 0000000..791beb3 --- /dev/null +++ itests/hive-blobstore/src/test/java/org/apache/hadoop/hive/cli/TestBlobstoreNegativeCliDriver.java @@ -0,0 +1,45 @@ +package org.apache.hadoop.hive.cli; + +import java.io.File; +import java.util.List; + +import org.apache.hadoop.hive.cli.control.CliAdapter; +import org.apache.hadoop.hive.cli.control.CliConfigs; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class TestBlobstoreNegativeCliDriver { + + 
static CliAdapter adapter = new CliConfigs.BlobstoreNegativeCliConfig().getCliAdapter(); + + @Parameters(name = "{0}") + public static List getParameters() throws Exception { + return adapter.getParameters(); + } + + @ClassRule + public static TestRule cliClassRule = adapter.buildClassRule(); + + @Rule + public TestRule cliTestRule = adapter.buildTestRule(); + + private String name; + private File qfile; + + public TestBlobstoreNegativeCliDriver(String name, File qfile) { + this.name = name; + this.qfile = qfile; + } + + @Test + public void testCliDriver() throws Exception { + adapter.runTest(name, qfile); + } + +} diff --git itests/hive-blobstore/src/test/queries/clientnegative/select_dropped_table.q itests/hive-blobstore/src/test/queries/clientnegative/select_dropped_table.q new file mode 100644 index 0000000..295d9dd --- /dev/null +++ itests/hive-blobstore/src/test/queries/clientnegative/select_dropped_table.q @@ -0,0 +1,4 @@ +create table qtest (key string, value string) +location '${hiveconf:test.blobstore.output}/qtest'; +drop table qtest; +select * from qtest; diff --git itests/hive-blobstore/src/test/queries/clientpositive/insert_into.q itests/hive-blobstore/src/test/queries/clientpositive/insert_into.q new file mode 100644 index 0000000..31ec45f --- /dev/null +++ itests/hive-blobstore/src/test/queries/clientpositive/insert_into.q @@ -0,0 +1,5 @@ +DROP TABLE qtest; +CREATE TABLE qtest (value int) LOCATION '${hiveconf:test.blobstore.output}/qtest/'; +INSERT INTO qtest VALUES (1), (10), (100), (1000); +SELECT * FROM qtest; +DROP TABLE qtest; diff --git itests/hive-blobstore/src/test/queries/clientpositive/insert_into_2.q itests/hive-blobstore/src/test/queries/clientpositive/insert_into_2.q new file mode 100644 index 0000000..e64bb15 --- /dev/null +++ itests/hive-blobstore/src/test/queries/clientpositive/insert_into_2.q @@ -0,0 +1,13 @@ +drop table blobstore_table; +create table blobstore_table (key string, value string) +location '${hiveconf:test.blobstore.output}/blobstore_table'; + +select count(*) from blobstore_src; + +insert overwrite table blobstore_table select * from blobstore_src; +select count(*) from blobstore_table; + +insert into table blobstore_table select * from blobstore_src; +select count(*) from blobstore_table; + +drop table blobstore_table; diff --git itests/hive-blobstore/src/test/resources/blobstore-conf.xml.template itests/hive-blobstore/src/test/resources/blobstore-conf.xml.template new file mode 100644 index 0000000..184484a --- /dev/null +++ itests/hive-blobstore/src/test/resources/blobstore-conf.xml.template @@ -0,0 +1,18 @@ + + + + + + fs.s3a.access.key + + + + fs.s3a.secret.key + + + diff --git itests/hive-blobstore/src/test/resources/core-site.xml itests/hive-blobstore/src/test/resources/core-site.xml new file mode 100644 index 0000000..8c41135 --- /dev/null +++ itests/hive-blobstore/src/test/resources/core-site.xml @@ -0,0 +1,48 @@ + + + + + + + + + + hadoop.tmp.dir + target/build/test + A base for other temporary directories. 
+ true + + + + + hadoop.security.authentication + simple + + + + + + + + diff --git itests/hive-blobstore/src/test/resources/testconfiguration.properties itests/hive-blobstore/src/test/resources/testconfiguration.properties new file mode 100644 index 0000000..1db1510 --- /dev/null +++ itests/hive-blobstore/src/test/resources/testconfiguration.properties @@ -0,0 +1,2 @@ +include.query.files= +exclude.query.files= diff --git itests/hive-blobstore/src/test/results/clientnegative/select_dropped_table.q.out itests/hive-blobstore/src/test/results/clientnegative/select_dropped_table.q.out new file mode 100644 index 0000000..bfdf3c1 --- /dev/null +++ itests/hive-blobstore/src/test/results/clientnegative/select_dropped_table.q.out @@ -0,0 +1,21 @@ +PREHOOK: query: create table qtest (key string, value string) +location '#### A masked pattern was here ####' +PREHOOK: type: CREATETABLE +PREHOOK: Input: #### A masked pattern was here #### +PREHOOK: Output: database:default +PREHOOK: Output: default@qtest +POSTHOOK: query: create table qtest (key string, value string) +location '#### A masked pattern was here ####' +POSTHOOK: type: CREATETABLE +POSTHOOK: Input: #### A masked pattern was here #### +POSTHOOK: Output: database:default +POSTHOOK: Output: default@qtest +PREHOOK: query: drop table qtest +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@qtest +PREHOOK: Output: default@qtest +POSTHOOK: query: drop table qtest +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@qtest +POSTHOOK: Output: default@qtest +FAILED: SemanticException [Error 10001]: Line 2:14 Table not found 'qtest' diff --git itests/hive-blobstore/src/test/results/clientpositive/insert_into.q.out itests/hive-blobstore/src/test/results/clientpositive/insert_into.q.out new file mode 100644 index 0000000..572af93 --- /dev/null +++ itests/hive-blobstore/src/test/results/clientpositive/insert_into.q.out @@ -0,0 +1,43 @@ +PREHOOK: query: DROP TABLE qtest +PREHOOK: type: DROPTABLE +POSTHOOK: query: DROP TABLE qtest +POSTHOOK: type: DROPTABLE +#### A masked pattern was here #### +PREHOOK: type: CREATETABLE +PREHOOK: Input: #### A masked pattern was here #### +PREHOOK: Output: database:default +PREHOOK: Output: default@qtest +#### A masked pattern was here #### +POSTHOOK: type: CREATETABLE +POSTHOOK: Input: #### A masked pattern was here #### +POSTHOOK: Output: database:default +POSTHOOK: Output: default@qtest +PREHOOK: query: INSERT INTO qtest VALUES (1), (10), (100), (1000) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@qtest +POSTHOOK: query: INSERT INTO qtest VALUES (1), (10), (100), (1000) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@qtest +POSTHOOK: Lineage: qtest.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: SELECT * FROM qtest +PREHOOK: type: QUERY +PREHOOK: Input: default@qtest +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM qtest +POSTHOOK: type: QUERY +POSTHOOK: Input: default@qtest +#### A masked pattern was here #### +1 +10 +100 +1000 +PREHOOK: query: DROP TABLE qtest +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@qtest +PREHOOK: Output: default@qtest +POSTHOOK: query: DROP TABLE qtest +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@qtest +POSTHOOK: Output: default@qtest diff --git itests/hive-blobstore/src/test/results/clientpositive/insert_into_2.q.out 
itests/hive-blobstore/src/test/results/clientpositive/insert_into_2.q.out new file mode 100644 index 0000000..7412ad1 --- /dev/null +++ itests/hive-blobstore/src/test/results/clientpositive/insert_into_2.q.out @@ -0,0 +1,71 @@ +PREHOOK: query: drop table blobstore_table +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table blobstore_table +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table blobstore_table (key string, value string) +location '#### A masked pattern was here ####' +PREHOOK: type: CREATETABLE +PREHOOK: Input: #### A masked pattern was here #### +PREHOOK: Output: database:default +PREHOOK: Output: default@blobstore_table +POSTHOOK: query: create table blobstore_table (key string, value string) +location '#### A masked pattern was here ####' +POSTHOOK: type: CREATETABLE +POSTHOOK: Input: #### A masked pattern was here #### +POSTHOOK: Output: database:default +POSTHOOK: Output: default@blobstore_table +PREHOOK: query: select count(*) from blobstore_src +PREHOOK: type: QUERY +PREHOOK: Input: default@blobstore_src +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from blobstore_src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@blobstore_src +#### A masked pattern was here #### +500 +PREHOOK: query: insert overwrite table blobstore_table select * from blobstore_src +PREHOOK: type: QUERY +PREHOOK: Input: default@blobstore_src +PREHOOK: Output: default@blobstore_table +POSTHOOK: query: insert overwrite table blobstore_table select * from blobstore_src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@blobstore_src +POSTHOOK: Output: default@blobstore_table +POSTHOOK: Lineage: blobstore_table.key SIMPLE [(blobstore_src)blobstore_src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: blobstore_table.value SIMPLE [(blobstore_src)blobstore_src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select count(*) from blobstore_table +PREHOOK: type: QUERY +PREHOOK: Input: default@blobstore_table +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from blobstore_table +POSTHOOK: type: QUERY +POSTHOOK: Input: default@blobstore_table +#### A masked pattern was here #### +500 +PREHOOK: query: insert into table blobstore_table select * from blobstore_src +PREHOOK: type: QUERY +PREHOOK: Input: default@blobstore_src +PREHOOK: Output: default@blobstore_table +POSTHOOK: query: insert into table blobstore_table select * from blobstore_src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@blobstore_src +POSTHOOK: Output: default@blobstore_table +POSTHOOK: Lineage: blobstore_table.key SIMPLE [(blobstore_src)blobstore_src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: blobstore_table.value SIMPLE [(blobstore_src)blobstore_src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select count(*) from blobstore_table +PREHOOK: type: QUERY +PREHOOK: Input: default@blobstore_table +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from blobstore_table +POSTHOOK: type: QUERY +POSTHOOK: Input: default@blobstore_table +#### A masked pattern was here #### +1000 +PREHOOK: query: drop table blobstore_table +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@blobstore_table +PREHOOK: Output: default@blobstore_table +POSTHOOK: query: drop table blobstore_table +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@blobstore_table +POSTHOOK: Output: default@blobstore_table diff --git itests/pom.xml itests/pom.xml index 426ba04..14dfad3 100644 --- itests/pom.xml 
+++ itests/pom.xml @@ -36,6 +36,7 @@ custom-udfs hcatalog-unit hive-unit + hive-blobstore util test-serde qtest diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java index 2c82e99..046e5b9 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java @@ -498,4 +498,44 @@ public SparkNegativeCliConfig() { } } } + + public static class BlobstoreCliConfig extends AbstractCliConfig { + public BlobstoreCliConfig() { + super(CoreBlobstoreCliDriver.class); + try { + setQueryDir("itests/hive-blobstore/src/test/queries/clientpositive"); + + setResultsDir("itests/hive-blobstore/src/test/results/clientpositive"); + setLogDir("itests/hive-blobstore/target/qfile-results/clientpositive"); + + setInitScript("blobstore_test_init.q"); + setCleanupScript("blobstore_test_cleanup.q"); + + setHiveConfDir("data/conf/blobstore"); + setClusterType(MiniClusterType.none); + } catch (Exception e) { + throw new RuntimeException("can't construct cliconfig", e); + } + } + } + + public static class BlobstoreNegativeCliConfig extends AbstractCliConfig { + public BlobstoreNegativeCliConfig() { + super(CoreBlobstoreNegativeCliDriver.class); + try { + setQueryDir("itests/hive-blobstore/src/test/queries/clientnegative"); + + setResultsDir("itests/hive-blobstore/src/test/results/clientnegative"); + setLogDir("itests/hive-blobstore/target/qfile-results/clientnegative"); + + setInitScript("blobstore_test_init.q"); + setCleanupScript("blobstore_test_cleanup.q"); + + setHiveConfDir("data/conf/blobstore"); + setClusterType(MiniClusterType.none); + } catch (Exception e) { + throw new RuntimeException("can't construct cliconfig", e); + } + } + } } diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriver.java new file mode 100644 index 0000000..56743fc --- /dev/null +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriver.java @@ -0,0 +1,139 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.cli.control; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.apache.hadoop.hive.cli.control.AbstractCliConfig.MetastoreType; +import org.apache.hadoop.hive.ql.QTestUtil; +import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +public class CoreBlobstoreCliDriver extends CliAdapter { + + private static QTestUtil qt; + private static final String testSessionId = CoreBlobstoreCliDriverUtil.generateTestId(); + + public CoreBlobstoreCliDriver(AbstractCliConfig testCliConfig) { + super(testCliConfig); + } + + @Override + @BeforeClass + public void beforeClass() { + MiniClusterType miniMR =cliConfig.getClusterType(); + String hiveConfDir = cliConfig.getHiveConfDir(); + String initScript = cliConfig.getInitScript(); + String cleanupScript = cliConfig.getCleanupScript(); + boolean useHBaseMetastore = cliConfig.getMetastoreType() == MetastoreType.hbase; + try { + String hadoopVer = cliConfig.getHadoopVersion(); + qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, + hiveConfDir, hadoopVer, initScript, cleanupScript, useHBaseMetastore, true); + + // do a one time initialization + CoreBlobstoreCliDriverUtil.configureEnvironment(testSessionId, qt.getConf()); + qt.cleanUp(); + qt.createSources(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + throw new RuntimeException("Unexpected exception in static initialization",e); + } + } + + @Override + @Before + public void setUp() { + try { + qt.clearTestSideEffects(); + CoreBlobstoreCliDriverUtil.configureEnvironment(testSessionId, qt.getConf()); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in setup"); + } + } + + @Override + @After + public void tearDown() { + try { + qt.clearPostTestEffects(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in tearDown"); + } + } + + @Override + @AfterClass + public void shutdown() throws Exception { + try { + qt.shutdown(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in shutdown"); + } + } + + static String debugHint = "\nSee ./itests/hive-blobstore/target/tmp/log/hive.log, " + + "or check ./itests/hive-blobstore/target/surefire-reports/ for specific test cases logs."; + + @Override + public void runTest(String tname, String fname, String fpath) throws Exception { + long startTime = System.currentTimeMillis(); + try { + System.err.println("Begin query: " + fname); + + qt.addFile(fpath); + + if (qt.shouldBeSkipped(fname)) { + System.err.println("Test " + fname + " skipped"); + return; + } + + qt.cliInit(fname, false); + int ecode = qt.executeClient(fname); + if (ecode != 0) { + qt.failed(ecode, fname, debugHint); + } + ecode = qt.checkCliDriverResults(fname); + if (ecode != 0) { + qt.failedDiff(ecode, fname, debugHint); + } + } + catch (Throwable e) { + qt.failed(e, fname, debugHint); + } + + long elapsedTime = System.currentTimeMillis() - startTime; + System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); + assertTrue("Test passed", true); + } +} diff --git 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriverUtil.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriverUtil.java new file mode 100644 index 0000000..85280ca --- /dev/null +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreCliDriverUtil.java @@ -0,0 +1,43 @@ +/* + * Copyright 2015 The Apache Software Foundation. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.cli.control; + +import org.apache.hadoop.hive.conf.HiveConf; +import static java.util.UUID.randomUUID; + +final class CoreBlobstoreCliDriverUtil { + + private static final String SYS_PROP_TEST_BLOBSTORE_PATH = "test.blobstore.path"; + private static final String HCONF_TEST_BLOBSTORE_TMP = "test.blobstore.tmp"; + private static final String HCONF_TEST_BLOBSTORE_OUTPUT = "test.blobstore.output"; + private static final String HCONF_TEST_BLOBSTORE_SOURCES = "test.blobstore.sources"; + private static final String BLOBSTORE_PREFIX_DELIMITER = "/"; + + private CoreBlobstoreCliDriverUtil() {} + + static void configureEnvironment(String testSessionId, HiveConf conf) { + String blobstoreTmp = System.getProperty(SYS_PROP_TEST_BLOBSTORE_PATH) + BLOBSTORE_PREFIX_DELIMITER + testSessionId; + String blobstoreSources = blobstoreTmp + BLOBSTORE_PREFIX_DELIMITER + "sources"; + String blobstoreOutput = blobstoreTmp + BLOBSTORE_PREFIX_DELIMITER + "output"; + conf.set(HCONF_TEST_BLOBSTORE_SOURCES, blobstoreSources); + conf.set(HCONF_TEST_BLOBSTORE_OUTPUT, blobstoreOutput); + conf.set(HCONF_TEST_BLOBSTORE_TMP, blobstoreTmp); + } + + static String generateTestId() { + return System.currentTimeMillis() + "_" + randomUUID(); + } +} \ No newline at end of file diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreNegativeCliDriver.java new file mode 100644 index 0000000..da87870 --- /dev/null +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBlobstoreNegativeCliDriver.java @@ -0,0 +1,142 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.cli.control; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.apache.hadoop.hive.ql.QTestUtil; +import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; + +public class CoreBlobstoreNegativeCliDriver extends CliAdapter{ + + private QTestUtil qt; + private static final String testSessionId = CoreBlobstoreCliDriverUtil.generateTestId(); + + public CoreBlobstoreNegativeCliDriver(AbstractCliConfig testCliConfig) { + super(testCliConfig); + } + + @Override + public void beforeClass(){ + MiniClusterType miniMR = cliConfig.getClusterType(); + String hiveConfDir = cliConfig.getHiveConfDir(); + String initScript = cliConfig.getInitScript(); + String cleanupScript = cliConfig.getCleanupScript(); + + try { + String hadoopVer = cliConfig.getHadoopVersion(); + qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, + hiveConfDir, hadoopVer, initScript, cleanupScript, false, false); + // do a one time initialization + CoreBlobstoreCliDriverUtil.configureEnvironment(testSessionId, qt.getConf()); + qt.cleanUp(); + qt.createSources(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in static initialization"); + } + } + + @Override + @Before + public void setUp() { + try { + qt.clearTestSideEffects(); + CoreBlobstoreCliDriverUtil.configureEnvironment(testSessionId, qt.getConf()); + } catch (Throwable e) { + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in setup"); + } + } + + @Override + @After + public void tearDown() { + try { + qt.clearPostTestEffects(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in tearDown"); + } + } + + @Override + @AfterClass + public void shutdown() throws Exception { + try { + qt.shutdown(); + } catch (Exception e) { + System.err.println("Exception: " + e.getMessage()); + e.printStackTrace(); + System.err.flush(); + fail("Unexpected exception in shutdown"); + } + } + + /** + * Dummy last test. 
This is only meant to shutdown qt + */ + public void testNegativeCliDriver_shutdown() { + System.err.println ("Cleaning up " + "$className"); + } + + static String debugHint = "\nSee ./itests/hive-blobstore/target/tmp/log/hive.log, " + + "or check ./itests/hive-blobstore/target/surefire-reports/ for specific test cases logs."; + + @Override + public void runTest(String tname, String fname, String fpath) throws Exception { + long startTime = System.currentTimeMillis(); + try { + System.err.println("Begin query: " + fname); + + qt.addFile(fpath); + + if (qt.shouldBeSkipped(fname)) { + System.err.println("Test " + fname + " skipped"); + return; + } + + qt.cliInit(fname, false); + int ecode = qt.executeClient(fname); + if (ecode == 0) { + qt.failed(fname, debugHint); + } + + ecode = qt.checkCliDriverResults(fname); + if (ecode != 0) { + qt.failedDiff(ecode, fname, debugHint); + } + } + catch (Throwable e) { + qt.failed(e, fname, debugHint); + } + + long elapsedTime = System.currentTimeMillis() - startTime; + System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime/1000 + "s"); + assertTrue("Test passed", true); + } +} diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 358ba51..2d9aec7 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -1487,7 +1487,8 @@ private void maskPatterns(Pattern[] patterns, String fname) throws Exception { "fk_-?[0-9]*_[0-9]*_[0-9]*", ".*at com\\.sun\\.proxy.*", ".*at com\\.jolbox.*", - "org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*" + "org\\.apache\\.hadoop\\.hive\\.metastore\\.model\\.MConstraint@([0-9]|[a-z])*", + "(s3.?|swift|wasb.?):\\/\\/[\\w\\.\\/-]*" }); private final Pattern[] partialReservedPlanMask = toPattern(new String[] { @@ -2166,7 +2167,7 @@ public int compare(String str1, String str2) { int i = 0; while (!statements.isEmpty()) { // PreparedStatement extend Statement - Statement st = (Statement)statements.remove(i); + Statement st = statements.remove(i); try { if (st != null) { st.close();
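The masking pattern appended to QTestUtil above is what keeps the new .q.out files bucket-independent: any s3/s3a/s3n, swift, or wasb/wasbs URI printed by a query is rewritten to the generic masked-pattern marker before results are diffed, so the files do not depend on the bucket passed via -Dtest.blobstore.path. Below is a small self-contained Java sketch (illustration only, not part of this patch) showing how the added regex behaves on a typical PREHOOK line; the marker string matches the one visible in the .q.out files above.

import java.util.regex.Pattern;

public class BlobstoreMaskDemo {
  // Same regex that this patch appends to QTestUtil's mask list.
  private static final Pattern BLOBSTORE_URI =
      Pattern.compile("(s3.?|swift|wasb.?)://[\\w./-]*");

  public static void main(String[] args) {
    // Hypothetical line as it would appear before masking.
    String line = "PREHOOK: Input: s3a://my-bucket/hive/s3/it/prefix/output/qtest";
    // Matches are replaced with the marker used throughout the expected output files.
    String masked = BLOBSTORE_URI.matcher(line)
        .replaceAll("#### A masked pattern was here ####");
    System.out.println(masked); // PREHOOK: Input: #### A masked pattern was here ####
  }
}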