diff --git hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index 78d7ae4..bf2b24e 100644
--- hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -21,6 +21,7 @@
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 
 import junit.framework.TestCase;
@@ -42,8 +43,11 @@
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.hcatalog.ExitException;
 import org.apache.hive.hcatalog.NoExitSecurityManager;
@@ -229,6 +233,12 @@ private Table getTable(String dbName, String tblName, String typeName) throws No
     tbl.setDbName(dbName);
     tbl.setTableName(tblName);
     StorageDescriptor sd = new StorageDescriptor();
+    sd.setSerdeInfo(new SerDeInfo());
+    sd.getSerdeInfo().setName(tblName);
+    sd.getSerdeInfo().setParameters(new HashMap());
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     tbl.setSd(sd);
     sd.setCols(typ1.getFields());
 
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 74a4aff..130fd67 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -67,8 +67,11 @@
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.thrift.TException;
 import org.junit.Test;
@@ -192,6 +195,9 @@ public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.setSortCols(new ArrayList());
     sd.setStoredAsSubDirectories(false);
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
 
     //skewed information
     SkewedInfo skewInfor = new SkewedInfo();
@@ -629,6 +635,9 @@ public void testAlterViewParititon() throws Throwable {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     client.createTable(tbl);
@@ -738,6 +747,9 @@ public void testAlterPartition() throws Throwable {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     tbl.setPartitionKeys(new ArrayList(2));
@@ -834,6 +846,9 @@ public void testRenamePartition() throws Throwable {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     tbl.setPartitionKeys(new ArrayList(2));
@@ -1217,6 +1232,9 @@ public void testSimpleTable() throws Exception {
         org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
+
     tbl.setPartitionKeys(new ArrayList());
 
     client.createTable(tbl);
@@ -1570,6 +1588,10 @@ public void testAlterTable() throws Exception {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters().put(
         org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
+
     boolean failed = false;
     try {
       client.createTable(tbl);
@@ -1739,7 +1761,9 @@ public void testComplexTable() throws Exception {
         org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "9");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
-
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
+
     tbl.setPartitionKeys(new ArrayList(2));
     tbl.getPartitionKeys().add(
         new FieldSchema("ds",
@@ -1833,6 +1857,8 @@ public void testTableDatabase() throws Exception {
         org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "9");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     tbl.setSd(sd);
     tbl.getSd().setCols(cols);
 
@@ -1946,6 +1972,9 @@ public void testPartitionFilter() throws Exception {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     tbl.setPartitionKeys(partCols);
@@ -2136,6 +2165,9 @@ public void testFilterSinglePartition() throws Exception {
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     tbl.setPartitionKeys(partCols);
@@ -2713,7 +2745,10 @@ private StorageDescriptor createStorageDescriptor(String tableName,
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.setSortCols(new ArrayList());
-
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
+
     return sd;
   }
 
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
index 7e2c27d..c0f0d26 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
@@ -42,8 +42,11 @@
 import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.DropTableEvent;
 import org.apache.hadoop.hive.metastore.events.ListenerEvent;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
 
 import org.mortbay.log.Log;
@@ -107,7 +110,10 @@ protected void setUp() throws Exception {
     sd.getSerdeInfo().setName(tblName);
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
-
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
+
     table.setDbName(dbName);
     table.setTableName(tblName);
     table.setParameters(tableParams);
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
index 1b688bd..22fdb69 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
@@ -36,6 +36,8 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.tools.HiveMetaTool;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -117,6 +119,8 @@ protected void setUp() throws Exception {
     sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, avroUri);
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName());
+    sd.setInputFormat(AvroContainerInputFormat.class.getName());
+    sd.setOutputFormat(AvroContainerOutputFormat.class.getName());
     tbl.setPartitionKeys(new ArrayList());
 
     client.createTable(tbl);
@@ -141,6 +145,9 @@ protected void setUp() throws Exception {
     sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, badAvroUri);
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName());
+    sd.setInputFormat(AvroContainerInputFormat.class.getName());
+    sd.setOutputFormat(AvroContainerOutputFormat.class.getName());
+
     tbl.setPartitionKeys(new ArrayList());
     client.createTable(tbl);
     client.close();
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
index e059255..39e7005 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java
@@ -32,7 +32,10 @@
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
 
 /**
@@ -100,6 +103,9 @@ public void testRetryingHMSHandler() throws Exception {
     sd.getSerdeInfo().setParameters(serdParams);
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     sd.setSortCols(new ArrayList());
 
     Table tbl = new Table();
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
index 5b5f44b..97fb7ba 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
@@ -36,6 +36,8 @@
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider;
@@ -174,6 +176,8 @@ public void testSimplePrivileges() throws Exception {
         org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     ttbl.setPartitionKeys(new ArrayList());
 
     MetaException me = null;
diff --git ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
index e775bac..93981fa 100644
--- ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
+++ ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
@@ -40,11 +40,14 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.util.StringUtils;
@@ -239,6 +242,9 @@ private void addSd(ArrayList cols, Table tbl) {
     sd.getSerdeInfo().getParameters()
         .put(serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.setSortCols(new ArrayList());
+    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
+    sd.setInputFormat(HiveInputFormat.class.getName());
+    sd.setOutputFormat(HiveOutputFormat.class.getName());
     tbl.setSd(sd);
   }