diff --git a/hcatalog/hcatalog-pig-adapter/pom.xml b/hcatalog/hcatalog-pig-adapter/pom.xml
index 4d2ca51..7589efb 100644
--- a/hcatalog/hcatalog-pig-adapter/pom.xml
+++ b/hcatalog/hcatalog-pig-adapter/pom.xml
@@ -53,6 +53,13 @@
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
index ee3e750..0acac19 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
@@ -18,6 +18,8 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.PrintWriter;
@@ -32,6 +34,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
 
@@ -42,6 +45,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -67,14 +72,18 @@
 import org.joda.time.DateTime;
 
 import org.junit.After;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.*;
 
+@RunWith(Parameterized.class)
 public class TestHCatLoader {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoader.class);
   private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("java.io.tmpdir") +
@@ -91,9 +100,18 @@
   private Driver driver;
   private Map<Integer, Pair<Integer, String>> basicInputData;
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-        "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+  private static final Set<String> DISABLED_STORAGE_FORMATS = ImmutableSet.of(
+      IOConstants.PARQUETFILE);
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
+  }
+
+  public TestHCatLoader(String storageFormat) {
+    this.storageFormat = storageFormat;
   }
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
@@ -105,7 +123,7 @@ static void dropTable(String tablename, Driver driver) throws IOException, Comma
   }
 
   private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
-    createTable(tablename, schema, partitionedBy, driver, storageFormat());
+    createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
 
   static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat)
@@ -143,6 +161,8 @@ private static void checkProjection(FieldSchema fs, String expectedName, byte ex
 
   @Before
   public void setup() throws Exception {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
+
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
@@ -209,17 +229,18 @@ public void setup() throws Exception {
     server.registerQuery("D = load '" + COMPLEX_FILE_NAME + "' as (name:chararray, studentid:int, contact:tuple(phno:chararray,email:chararray), currently_registered_courses:bag{innertup:tuple(course:chararray)}, current_grades:map[ ] , phnos :bag{innertup:tuple(phno:chararray,type:chararray)});", ++i);
     server.registerQuery("store D into '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
     server.executeBatch();
-
   }
 
   @After
   public void tearDown() throws Exception {
     try {
-      dropTable(BASIC_TABLE);
-      dropTable(COMPLEX_TABLE);
-      dropTable(PARTITIONED_TABLE);
-      dropTable(SPECIFIC_SIZE_TABLE);
-      dropTable(AllTypesTable.ALL_PRIMITIVE_TYPES_TABLE);
+      if (driver != null) {
+        dropTable(BASIC_TABLE);
+        dropTable(COMPLEX_TABLE);
+        dropTable(PARTITIONED_TABLE);
+        dropTable(SPECIFIC_SIZE_TABLE);
+        dropTable(AllTypesTable.ALL_PRIMITIVE_TYPES_TABLE);
+      }
     } finally {
       FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
     }
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
index 40ec597..1ee2bd3 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
@@ -18,19 +18,23 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
-
-import junit.framework.Assert;
+import java.util.Set;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -46,12 +50,18 @@
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
 
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@RunWith(Parameterized.class)
 public class TestHCatLoaderComplexSchema {
 
   //private static MiniCluster cluster = MiniCluster.buildCluster();
@@ -59,13 +69,22 @@
   //private static Properties props;
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoaderComplexSchema.class);
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
-    driver.run("drop table " + tablename);
+  private static final Set<String> DISABLED_STORAGE_FORMATS = ImmutableSet.of(
+      IOConstants.PARQUETFILE);
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
   }
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-        "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
"'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')"; + public TestHCatLoaderComplexSchema(String storageFormat) { + this.storageFormat = storageFormat; + } + + private void dropTable(String tablename) throws IOException, CommandNeedRetryException { + driver.run("drop table " + tablename); } private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException { @@ -74,7 +93,7 @@ private void createTable(String tablename, String schema, String partitionedBy) if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) { createTable = createTable + "partitioned by (" + partitionedBy + ") "; } - createTable = createTable + "stored as " + storageFormat(); + createTable = createTable + "stored as " + storageFormat; LOG.info("Creating table:\n {}", createTable); CommandProcessorResponse result = driver.run(createTable); int retCode = result.getResponseCode(); @@ -98,7 +117,11 @@ public static void setUpBeforeClass() throws Exception { SessionState.start(new CliSessionState(hiveConf)); //props = new Properties(); //props.setProperty("fs.default.name", cluster.getProperties().getProperty("fs.default.name")); + } + @Before + public void setUp() { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); } private static final TupleFactory tf = TupleFactory.getInstance(); diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java index 763af9f..b7a28a6 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java @@ -18,18 +18,24 @@ */ package org.apache.hive.hcatalog.pig; +import com.google.common.collect.ImmutableSet; + import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.math.BigDecimal; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Properties; +import java.util.Set; import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.io.IOConstants; +import org.apache.hadoop.hive.ql.io.StorageFormats; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hive.hcatalog.HcatTestUtils; @@ -48,19 +54,39 @@ import org.joda.time.DateTimeZone; import org.junit.Assert; +import org.junit.Assume; +import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +@RunWith(Parameterized.class) public class TestHCatStorer extends HCatBaseTest { private static final Logger LOG = LoggerFactory.getLogger(TestHCatStorer.class); private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data"; + private static final Set DISABLED_STORAGE_FORMATS = ImmutableSet.of( + IOConstants.PARQUETFILE); + + private String storageFormat; + + @Parameterized.Parameters + public static Collection generateParameters() { + return StorageFormats.names(); + } + + public TestHCatStorer(String storageFormat) { + this.storageFormat = storageFormat; + } + //Start: tests that check values from Pig that are out of range for target column @Test public void testWriteTinyint() throws Exception { + 
Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); pigValueRangeTest("junitTypeTest1", "tinyint", "int", null, Integer.toString(1), Integer.toString(1)); pigValueRangeTestOverflow("junitTypeTest1", "tinyint", "int", null, Integer.toString(300)); pigValueRangeTestOverflow("junitTypeTest2", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, @@ -71,6 +97,7 @@ public void testWriteTinyint() throws Exception { @Test public void testWriteSmallint() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); pigValueRangeTest("junitTypeTest1", "smallint", "int", null, Integer.toString(Short.MIN_VALUE), Integer.toString(Short.MIN_VALUE)); pigValueRangeTestOverflow("junitTypeTest2", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, @@ -81,6 +108,7 @@ public void testWriteSmallint() throws Exception { @Test public void testWriteChar() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); pigValueRangeTest("junitTypeTest1", "char(5)", "chararray", null, "xxx", "xxx "); pigValueRangeTestOverflow("junitTypeTest1", "char(5)", "chararray", null, "too_long"); pigValueRangeTestOverflow("junitTypeTest2", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, @@ -91,6 +119,7 @@ public void testWriteChar() throws Exception { @Test public void testWriteVarchar() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); pigValueRangeTest("junitTypeTest1", "varchar(5)", "chararray", null, "xxx", "xxx"); pigValueRangeTestOverflow("junitTypeTest1", "varchar(5)", "chararray", null, "too_long"); pigValueRangeTestOverflow("junitTypeTest2", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, @@ -101,6 +130,7 @@ public void testWriteVarchar() throws Exception { @Test public void testWriteDecimalXY() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); pigValueRangeTest("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(1.2).toString(), BigDecimal.valueOf(1.2).toString()); pigValueRangeTestOverflow("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(12345.12).toString()); @@ -112,6 +142,7 @@ public void testWriteDecimalXY() throws Exception { @Test public void testWriteDecimalX() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); //interestingly decimal(2) means decimal(2,0) pigValueRangeTest("junitTypeTest1", "decimal(2)", "bigdecimal", null, BigDecimal.valueOf(12).toString(), BigDecimal.valueOf(12).toString()); @@ -123,6 +154,7 @@ public void testWriteDecimalX() throws Exception { @Test public void testWriteDecimal() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); //decimal means decimal(10,0) pigValueRangeTest("junitTypeTest1", "decimal", "bigdecimal", null, BigDecimal.valueOf(1234567890).toString(), BigDecimal.valueOf(1234567890).toString()); @@ -137,8 +169,10 @@ public void testWriteDecimal() throws Exception { * include time to make sure it's 0 */ private static final String FORMAT_4_DATE = "yyyy-MM-dd HH:mm:ss"; + @Test public void testWriteDate() throws Exception { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); DateTime d = new DateTime(1991,10,11,0,0); pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(), d.toString(FORMAT_4_DATE), FORMAT_4_DATE); @@ -157,6 +191,7 @@ public void testWriteDate() throws Exception { @Test public void 
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     DateTime d = new DateTime(1991,10,11,23,10,DateTimeZone.forOffsetHours(-11));
     FrontendException fe = null;
     //expect to fail since the time component is not 0
@@ -170,6 +205,7 @@ public void testWriteDate3() throws Exception {
 
   @Test
   public void testWriteDate2() throws Exception {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     DateTime d = new DateTime(1991,11,12,0,0, DateTimeZone.forID("US/Eastern"));
     pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(),
       d.toString(FORMAT_4_DATE), FORMAT_4_DATE);
@@ -193,6 +229,7 @@ public void testWriteDate2() throws Exception {
    */
   @Test
   public void testWriteTimestamp() throws Exception {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     DateTime d = new DateTime(1991,10,11,14,23,30, 10);//uses default TZ
     pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
@@ -229,13 +266,6 @@ private void pigValueRangeTest(String tblName, String hiveType, String pigType,
   }
 
   /**
-   * this should be overridden in subclass to test with different file formats
-   */
-  String getStorageFormat() {
-    return "RCFILE";
-  }
-
-  /**
    * This is used to test how Pig values of various data types which are out of range for Hive target
    * column are handled. Currently the options are to raise an error or write NULL.
    * 1. create a data file with 1 column, 1 row
@@ -258,7 +288,7 @@ private void pigValueRangeTest(String tblName, String hiveType, String pigType,
     throws Exception {
     TestHCatLoader.dropTable(tblName, driver);
     final String field = "f1";
-    TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, getStorageFormat());
+    TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, storageFormat);
     HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, new String[] {inputValue});
     LOG.debug("File=" + INPUT_FILE_NAME);
     dumpFile(INPUT_FILE_NAME);
@@ -334,10 +364,11 @@ private void pigValueRangeTest(String tblName, String hiveType, String pigType,
    */
   @Test
   public void testDateCharTypes() throws Exception {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     final String tblName = "junit_date_char";
     TestHCatLoader.dropTable(tblName, driver);
     TestHCatLoader.createTable(tblName,
-      "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, getStorageFormat());
+      "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
     int NUM_ROWS = 5;
     String[] rows = new String[NUM_ROWS];
     for(int i = 0; i < NUM_ROWS; i++) {
@@ -397,9 +428,10 @@ static void dumpFile(String fileName) throws Exception {
 
   @Test
   public void testPartColsInData() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -432,10 +464,11 @@ public void testPartColsInData() throws IOException, CommandNeedRetryException {
 
   @Test
   public void testMultiPartColsInData() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -475,9 +508,10 @@ public void testMultiPartColsInData() throws IOException, CommandNeedRetryExcept
 
   @Test
   public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -510,8 +544,9 @@ public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryExce
 
   @Test
   public void testNoAlias() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_parted");
-    String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + getStorageFormat();
+    String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -552,9 +587,10 @@ public void testNoAlias() throws IOException, CommandNeedRetryException {
 
   @Test
   public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -607,9 +643,10 @@ public void testStoreMultiTables() throws IOException, CommandNeedRetryException
 
   @Test
   public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -646,9 +683,10 @@ public void testStoreWithNoSchema() throws IOException, CommandNeedRetryExceptio
 
   @Test
   public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
string) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); @@ -685,9 +723,10 @@ public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryExcept @Test public void testEmptyStore() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table junit_unparted"); - String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat(); + String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); @@ -721,9 +760,10 @@ public void testEmptyStore() throws IOException, CommandNeedRetryException { @Test public void testBagNStruct() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table junit_unparted"); String createTable = "create table junit_unparted(b string,a struct, arr_of_struct array, " + - "arr_of_struct2 array>, arr_of_struct3 array>) stored as " + getStorageFormat(); + "arr_of_struct2 array>, arr_of_struct3 array>) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); @@ -754,9 +794,10 @@ public void testBagNStruct() throws IOException, CommandNeedRetryException { @Test public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table junit_unparted"); - String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + getStorageFormat(); + String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); @@ -809,9 +850,10 @@ public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryEx @Test public void testStoreFuncSimple() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table junit_unparted"); - String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat(); + String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); @@ -850,10 +892,11 @@ public void testStoreFuncSimple() throws IOException, CommandNeedRetryException @Test public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table if exists employee"); String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + - " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat(); + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); 
if (retCode != 0) { @@ -887,10 +930,11 @@ public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOExc @Test public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table if exists employee"); String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + - " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat(); + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { @@ -924,10 +968,11 @@ public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOExceptio @Test public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table if exists employee"); String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " + - " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat(); + " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { @@ -951,11 +996,11 @@ public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOEx driver.run("drop table employee"); } @Test - public void testPartitionPublish() - throws IOException, CommandNeedRetryException { + public void testPartitionPublish() throws IOException, CommandNeedRetryException { + Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat)); driver.run("drop table ptn_fail"); - String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + getStorageFormat(); + String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + storageFormat; int retCode = driver.run(createTable).getResponseCode(); if (retCode != 0) { throw new IOException("Failed to create table."); diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java index 9679d3c..ebd5f1f 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java @@ -18,18 +18,24 @@ */ package org.apache.hive.hcatalog.pig; +import com.google.common.collect.ImmutableSet; + import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashMap; import java.util.Map; +import java.util.Set; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.io.IOConstants; +import org.apache.hadoop.hive.ql.io.StorageFormats; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.hcatalog.common.HCatUtil; @@ -38,12 +44,16 @@ import org.apache.pig.ExecType; import org.apache.pig.PigServer; 
+import org.junit.Assume;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 import static org.junit.Assert.assertEquals;
 
+@RunWith(Parameterized.class)
 public class TestHCatStorerMulti {
   public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(
       System.getProperty("user.dir") + "/build/test/data/" +
@@ -57,9 +67,18 @@
 
   private static Map<Integer, Pair<Integer, String>> basicInputData;
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-        "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+  private static final Set<String> DISABLED_STORAGE_FORMATS = ImmutableSet.of(
+      IOConstants.PARQUETFILE);
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
+  }
+
+  public TestHCatStorerMulti(String storageFormat) {
+    this.storageFormat = storageFormat;
   }
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
@@ -72,7 +91,7 @@ private void createTable(String tablename, String schema, String partitionedBy)
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
       createTable = createTable + "partitioned by (" + partitionedBy + ") ";
     }
-    createTable = createTable + "stored as " + storageFormat();
+    createTable = createTable + "stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
     }
@@ -85,6 +104,8 @@ private void createTable(String tablename, String schema) throws IOException, Co
 
   @Before
   public void setUp() throws Exception {
+    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
+
     if (driver == null) {
       HiveConf hiveConf = new HiveConf(this.getClass());
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoader.java
deleted file mode 100644
index 82eb0d7..0000000
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoader.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hive.hcatalog.pig;
-
-public class TestOrcHCatLoader extends TestHCatLoader {
-
-  @Override
-  protected String storageFormat() {
-    return "orc";
-  }
-
-}
-
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoaderComplexSchema.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoaderComplexSchema.java
deleted file mode 100644
index 0538771..0000000
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatLoaderComplexSchema.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hive.hcatalog.pig;
-
-public class TestOrcHCatLoaderComplexSchema extends TestHCatLoaderComplexSchema {
-
-  @Override
-  protected String storageFormat() {
-    return "orc";
-  }
-
-}
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
deleted file mode 100644
index 65769b4..0000000
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hive.hcatalog.pig;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestOrcHCatStorer extends TestHCatStorer {
-  @Override String getStorageFormat() {
-    return "ORC";
-  }
-}
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
deleted file mode 100644
index 77c7979..0000000
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hive.hcatalog.pig;
-
-public class TestOrcHCatStorerMulti extends TestHCatStorerMulti {
-
-  @Override
-  protected String storageFormat() {
-    return "orc";
-  }
-}
-
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/StorageFormats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/StorageFormats.java
index 19fdeb5..d53c029 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/StorageFormats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/StorageFormats.java
@@ -130,5 +130,21 @@
     return parameters;
   }
+
+  /**
+   * Returns a list of the names of storage formats.
+   *
+   * @return List of names of storage formats.
+   */
+  public static Collection<Object[]> names() {
+    List<Object[]> names = new ArrayList<Object[]>();
+    for (StorageFormatDescriptor descriptor : ServiceLoader.load(StorageFormatDescriptor.class)) {
+      String[] formatNames = new String[descriptor.getNames().size()];
+      formatNames = descriptor.getNames().toArray(formatNames);
+      String[] params = { formatNames[0] };
+      names.add(params);
+    }
+    return names;
+  }
 }
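
Note for reviewers: every converted test class follows the same pattern, summarized below as a self-contained sketch. The class name ExampleStorageFormatTest and the table name example_tbl are illustrative, not part of this patch; the imports, StorageFormats.names(), IOConstants.PARQUETFILE, and the Assume idiom are the ones the patch itself uses.

// Sketch of the parameterization pattern applied above (hypothetical class).
package org.apache.hive.hcatalog.pig;

import java.util.Collection;
import java.util.Set;

import com.google.common.collect.ImmutableSet;

import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.StorageFormats;

import org.junit.Assume;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class ExampleStorageFormatTest {
  // Formats that are registered but not expected to pass yet; this patch
  // skips Parquet the same way.
  private static final Set<String> DISABLED_STORAGE_FORMATS =
      ImmutableSet.of(IOConstants.PARQUETFILE);

  private final String storageFormat;

  // JUnit runs the whole class once per Object[] returned here, passing the
  // array elements to the constructor. StorageFormats.names() yields one
  // single-element array per registered StorageFormatDescriptor.
  @Parameterized.Parameters
  public static Collection<Object[]> generateParameters() {
    return StorageFormats.names();
  }

  public ExampleStorageFormatTest(String storageFormat) {
    this.storageFormat = storageFormat;
  }

  @Test
  public void testCreateTableSuffix() {
    // assumeTrue reports the case as skipped, not failed, for disabled formats.
    Assume.assumeTrue(!DISABLED_STORAGE_FORMATS.contains(storageFormat));
    String createTable = "create table example_tbl(a int) stored as " + storageFormat;
    // ... feed createTable to the Hive Driver, as the tests in this patch do ...
  }
}

One design point worth noting: TestHCatLoader, TestHCatLoaderComplexSchema, and TestHCatStorerMulti place the Assume call in a @Before method so the whole fixture is skipped for disabled formats, while TestHCatStorer repeats it at the top of each @Test, presumably because its setup lives in the HCatBaseTest superclass rather than in the class itself.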