diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java index 841c9eb..d6e9753 100644 --- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java +++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java @@ -32,7 +32,11 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +45,6 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; import org.apache.hadoop.mapred.SequenceFileInputFormat; -import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hive.hcatalog.common.HCatException; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; @@ -568,12 +571,16 @@ public HCatCreateTableDesc build() throws HCatException { desc.fileFormat = fileFormat; if ("SequenceFile".equalsIgnoreCase(fileFormat)) { desc.inputformat = SequenceFileInputFormat.class.getName(); - desc.outputformat = SequenceFileOutputFormat.class + desc.outputformat = HiveSequenceFileOutputFormat.class .getName(); } else if ("RCFile".equalsIgnoreCase(fileFormat)) { desc.inputformat = 
RCFileInputFormat.class.getName(); desc.outputformat = RCFileOutputFormat.class.getName(); desc.serde = ColumnarSerDe.class.getName(); + } else if ("orcfile".equalsIgnoreCase(fileFormat)) { + desc.inputformat = OrcInputFormat.class.getName(); + desc.outputformat = OrcOutputFormat.class.getName(); + desc.serde = OrcSerde.class.getName(); } desc.storageHandler = StringUtils.EMPTY; } else if (!StringUtils.isEmpty(storageHandler)) { @@ -583,7 +590,7 @@ public HCatCreateTableDesc build() throws HCatException { LOG.info("Using text file format for the table."); desc.inputformat = TextInputFormat.class.getName(); LOG.info("Table input format:" + desc.inputformat); - desc.outputformat = IgnoreKeyTextOutputFormat.class + desc.outputformat = HiveIgnoreKeyTextOutputFormat.class .getName(); LOG.info("Table output format:" + desc.outputformat); } diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index 04029ed..aea3f31 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -30,9 +30,12 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.PartitionEventType; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.serde.serdeConstants; import 
org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; @@ -119,6 +122,7 @@ public void testBasicDDLCommands() throws Exception { String db = "testdb"; String tableOne = "testTable1"; String tableTwo = "testTable2"; + String tableThree = "testTable3"; HCatClient client = HCatClient.create(new Configuration(hcatConf)); client.dropDatabase(db, true, HCatClient.DropDBMode.CASCADE); @@ -170,7 +174,7 @@ public void testBasicDDLCommands() throws Exception { assertTrue(table2.getInputFileFormat().equalsIgnoreCase( TextInputFormat.class.getName())); assertTrue(table2.getOutputFileFormat().equalsIgnoreCase( - IgnoreKeyTextOutputFormat.class.getName())); + HiveIgnoreKeyTextOutputFormat.class.getName())); assertTrue("SerdeParams not found", table2.getSerdeParams() != null); assertEquals("checking " + serdeConstants.FIELD_DELIM, Character.toString('\001'), table2.getSerdeParams().get(serdeConstants.FIELD_DELIM)); @@ -186,6 +190,19 @@ public void testBasicDDLCommands() throws Exception { table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT)); assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase()); + + HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db, + tableThree, cols).fileFormat("orcfile").build(); + client.createTable(tableDesc3); + HCatTable table3 = client.getTable(db, tableThree); + assertTrue(table3.getInputFileFormat().equalsIgnoreCase( + OrcInputFormat.class.getName())); + assertTrue(table3.getOutputFileFormat().equalsIgnoreCase( + OrcOutputFormat.class.getName())); + assertTrue(table3.getSerdeLib().equalsIgnoreCase( + OrcSerde.class.getName())); + assertTrue(table3.getCols().equals(cols)); + client.close(); }