diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java index 89b15d0..cc855da 100644 --- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java +++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java @@ -31,7 +31,11 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -39,7 +43,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; import org.apache.hadoop.mapred.SequenceFileInputFormat; -import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hcatalog.common.HCatException; import org.apache.hcatalog.data.schema.HCatFieldSchema; @@ -496,12 +499,16 @@ public HCatCreateTableDesc build() throws HCatException { desc.fileFormat = fileFormat; if ("SequenceFile".equalsIgnoreCase(fileFormat)) { desc.inputformat = SequenceFileInputFormat.class.getName(); - desc.outputformat = SequenceFileOutputFormat.class + desc.outputformat = HiveSequenceFileOutputFormat.class .getName(); } else if ("RCFile".equalsIgnoreCase(fileFormat)) { desc.inputformat = RCFileInputFormat.class.getName(); 
desc.outputformat = RCFileOutputFormat.class.getName(); desc.serde = ColumnarSerDe.class.getName(); + } else if ("orcfile".equalsIgnoreCase(fileFormat)) { + desc.inputformat = OrcInputFormat.class.getName(); + desc.outputformat = OrcOutputFormat.class.getName(); + desc.serde = OrcSerde.class.getName(); } desc.storageHandler = StringUtils.EMPTY; } else if (!StringUtils.isEmpty(storageHandler)) { @@ -511,7 +518,7 @@ public HCatCreateTableDesc build() throws HCatException { LOG.info("Using text file format for the table."); desc.inputformat = TextInputFormat.class.getName(); LOG.info("Table input format:" + desc.inputformat); - desc.outputformat = IgnoreKeyTextOutputFormat.class + desc.outputformat = HiveIgnoreKeyTextOutputFormat.class .getName(); LOG.info("Table output format:" + desc.outputformat); } diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java index e755947..a640b1c 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java @@ -30,7 +30,10 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.PartitionEventType; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; @@ -109,6 +112,7 @@ public void testBasicDDLCommands() throws Exception { String db = "testdb"; String tableOne = "testTable1"; 
String tableTwo = "testTable2"; + String tableThree = "testTable3"; HCatClient client = HCatClient.create(new Configuration(hcatConf)); client.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE); @@ -159,8 +163,21 @@ assertTrue(table2.getInputFileFormat().equalsIgnoreCase( TextInputFormat.class.getName())); assertTrue(table2.getOutputFileFormat().equalsIgnoreCase( - IgnoreKeyTextOutputFormat.class.getName())); + HiveIgnoreKeyTextOutputFormat.class.getName())); assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase()); + + HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db, + tableThree, cols).fileFormat("orcfile").build(); + client.createTable(tableDesc3); + HCatTable table3 = client.getTable(db, tableThree); + assertTrue(table3.getInputFileFormat().equalsIgnoreCase( + OrcInputFormat.class.getName())); + assertTrue(table3.getOutputFileFormat().equalsIgnoreCase( + OrcOutputFormat.class.getName())); + assertTrue(table3.getSerdeLib().equalsIgnoreCase( + OrcSerde.class.getName())); + assertTrue(table3.getCols().equals(cols)); + client.close(); }