diff --git a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java index 87a0202..a44ab2d 100644 --- a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java +++ b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java @@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; +import org.apache.hcatalog.data.schema.HCatSchema; /** * The InputFormat to use to read data from HCatalog. @@ -134,4 +135,26 @@ public HCatInputFormat setProperties(Properties properties) throws IOException { } return this; } + + /** + * Return partitioning columns for this input, can only be called after setInput is called. + * @return partitioning columns of the table specified by the job. + */ + public HCatSchema getPartitionColumns() { + Preconditions.checkNotNull(this.inputJobInfo, + "inputJobInfo is null, setInput has not yet been called."); + return this.inputJobInfo.getTableInfo().getPartitionColumns(); + + } + + /** + * Return data columns for this input, can only be called after setInput is called. + * @return data columns of the table specified by the job. 
+ */ + public HCatSchema getDataColumns() { + Preconditions.checkNotNull(this.inputJobInfo, + "inputJobInfo is null, setInput has not yet been called."); + return this.inputJobInfo.getTableInfo().getDataColumns(); + } + } diff --git a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java index 8e3b2c2..6526603 100644 --- a/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java +++ b/hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java @@ -40,7 +40,7 @@ * Going forward, we plan on treating InputJobInfo as an implementation detail and no longer * expose to end-users. Should you have a need to use InputJobInfo outside HCatalog itself, * please contact the developer mailing list before depending on this class. - * @deprecated Use/modify {@link org.apache.hive.hcatalog.mapreduce.InputJobInfo} instead + * @deprecated Use/modify {@link org.apache.hive.hcatalog.mapreduce.HCatInputFormat} directly instead */ @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java index 2f24124..2318756 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java @@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; +import org.apache.hive.hcatalog.data.schema.HCatSchema; /** * The InputFormat to use to read data from HCatalog. 
@@ -39,24 +40,6 @@ private InputJobInfo inputJobInfo; /** - * @deprecated as of release 0.5, and will be removed in a future release - */ - @Deprecated - public static void setInput(Job job, InputJobInfo inputJobInfo) throws IOException { - setInput(job.getConfiguration(), inputJobInfo); - } - - /** - * @deprecated as of release 0.5, and will be removed in a future release - */ - @Deprecated - public static void setInput(Configuration conf, InputJobInfo inputJobInfo) throws IOException { - setInput(conf, inputJobInfo.getDatabaseName(), inputJobInfo.getTableName()) - .setFilter(inputJobInfo.getFilter()) - .setProperties(inputJobInfo.getProperties()); - } - - /** * See {@link #setInput(org.apache.hadoop.conf.Configuration, String, String)} */ public static HCatInputFormat setInput(Job job, String dbName, String tableName) throws IOException { @@ -133,4 +116,26 @@ public HCatInputFormat setProperties(Properties properties) throws IOException { } return this; } + + /** + * Return partitioning columns for this input, can only be called after setInput is called. + * @return partitioning columns of the table specified by the job. + */ + public HCatSchema getPartitionColumns() { + Preconditions.checkNotNull(this.inputJobInfo, + "inputJobInfo is null, setInput has not yet been called."); + return this.inputJobInfo.getTableInfo().getPartitionColumns(); + + } + + /** + * Return data columns for this input, can only be called after setInput is called. + * @return data columns of the table specified by the job. 
+ */ + public HCatSchema getDataColumns() { + Preconditions.checkNotNull(this.inputJobInfo, + "inputJobInfo is null, setInput has not yet been called."); + return this.inputJobInfo.getTableInfo().getDataColumns(); + + } } diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java index 9cfb1be..63b1fa3 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java @@ -79,20 +79,6 @@ public static InputJobInfo create(String databaseName, return new InputJobInfo(databaseName, tableName, filter, properties); } - /** - * Initializes a new InputJobInfo - * for reading data from a table. - * @param databaseName the db name - * @param tableName the table name - * @param filter the partition filter - */ - @Deprecated - public static InputJobInfo create(String databaseName, - String tableName, - String filter) { - return create(databaseName, tableName, filter, null); - } - private InputJobInfo(String databaseName, String tableName, diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java index 84b8b50..bbfd22b 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestInputJobInfo.java @@ -37,12 +37,4 @@ public void test4ArgCreate() throws Exception { Assert.assertEquals("value", jobInfo.getProperties().getProperty("key")); } - @Test - public void test3ArgCreate() throws Exception { - InputJobInfo jobInfo = InputJobInfo.create("Db", "Table", "Filter"); - Assert.assertEquals("Db", jobInfo.getDatabaseName()); - Assert.assertEquals("Table", jobInfo.getTableName()); - Assert.assertEquals("Filter", 
jobInfo.getFilter()); - Assert.assertEquals(0, jobInfo.getProperties().size()); - } } diff --git a/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java b/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java index 5cbe86d..c882b63 100644 --- a/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java +++ b/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/TestHBaseInputFormat.java @@ -29,7 +29,6 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -62,7 +61,6 @@ import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; import org.apache.hive.hcatalog.data.schema.HCatSchema; import org.apache.hive.hcatalog.mapreduce.HCatInputFormat; -import org.apache.hive.hcatalog.mapreduce.InputJobInfo; import org.junit.Test; public class TestHBaseInputFormat extends SkeletonHBaseTest { @@ -160,9 +158,7 @@ public void TestHBaseTableReadMR() throws Exception { MapReadHTable.resetCounters(); job.setInputFormatClass(HCatInputFormat.class); - InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName, - null); - HCatInputFormat.setInput(job, inputJobInfo); + HCatInputFormat.setInput(job, databaseName, tableName); job.setOutputFormatClass(TextOutputFormat.class); TextOutputFormat.setOutputPath(job, outputDir); job.setMapOutputKeyClass(BytesWritable.class); @@ -225,10 +221,8 @@ public void TestHBaseTableProjectionReadMR() throws Exception { job.setJarByClass(this.getClass()); job.setMapperClass(MapReadProjHTable.class); job.setInputFormatClass(HCatInputFormat.class); - InputJobInfo inputJobInfo = InputJobInfo.create( - MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null); HCatInputFormat.setOutputSchema(job, getProjectionSchema()); - HCatInputFormat.setInput(job, inputJobInfo); + 
HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); job.setOutputFormatClass(TextOutputFormat.class); TextOutputFormat.setOutputPath(job, outputDir); job.setMapOutputKeyClass(BytesWritable.class);