diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
index 5733662..594a2ae 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatInputFormat.java
@@ -27,6 +27,9 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hive.hcatalog.common.HCatConstants;
+import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
 /**
  * The InputFormat to use to read data from HCatalog.
@@ -140,4 +143,31 @@ public HCatInputFormat setProperties(Properties properties) throws IOException {
     }
     return this;
   }
+
+  /**
+   * Return partitioning columns for this input; can only be called after setInput is called.
+   * @return partitioning columns of the table specified by the job.
+   * @throws IOException
+   */
+  public static HCatSchema getPartitionColumns(Configuration conf) throws IOException {
+    InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(
+        conf.get(HCatConstants.HCAT_KEY_JOB_INFO));
+    Preconditions.checkNotNull(inputInfo,
+        "inputJobInfo is null, setInput has not yet been called to save job into conf supplied.");
+    return inputInfo.getTableInfo().getPartitionColumns();
+
+  }
+
+  /**
+   * Return data columns for this input; can only be called after setInput is called.
+   * @return data columns of the table specified by the job.
+   * @throws IOException
+   */
+  public static HCatSchema getDataColumns(Configuration conf) throws IOException {
+    InputJobInfo inputInfo = (InputJobInfo) HCatUtil.deserialize(
+        conf.get(HCatConstants.HCAT_KEY_JOB_INFO));
+    Preconditions.checkNotNull(inputInfo,
+        "inputJobInfo is null, setInput has not yet been called to save job into conf supplied.");
+    return inputInfo.getTableInfo().getDataColumns();
+  }
 }
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java
new file mode 100644
index 0000000..edcb558
--- /dev/null
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatInputFormatMethods.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.mapreduce;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestHCatInputFormatMethods extends HCatBaseTest {
+
+  private boolean setUpComplete = false;
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    if (setUpComplete) {
+      return;
+    }
+
+    Assert.assertEquals(0, driver.run("drop table if exists testHCIFMethods").getResponseCode());
+    Assert.assertEquals(0, driver.run(
+        "create table testHCIFMethods (a string, b int) partitioned by (x string, y string)")
+        .getResponseCode());
+
+    setUpComplete = true;
+  }
+
+
+  @Test
+  public void testGetPartitionAndDataColumns() throws Exception {
+
+    Configuration conf = new Configuration();
+    Job myJob = new Job(conf, "hcatTest");
+
+    HCatInputFormat.setInput(myJob, "default", "testHCIFMethods");
+    HCatSchema cols = HCatInputFormat.getDataColumns(myJob.getConfiguration());
+
+    Assert.assertTrue(cols.getFields() != null);
+    Assert.assertEquals(cols.getFields().size(), 2);
+    Assert.assertTrue(cols.getFields().get(0).getName().equals("a"));
+    Assert.assertTrue(cols.getFields().get(1).getName().equals("b"));
+    Assert.assertTrue(cols.getFields().get(0).getType().equals(HCatFieldSchema.Type.STRING));
+    Assert.assertTrue(cols.getFields().get(1).getType().equals(HCatFieldSchema.Type.INT));
+
+    HCatSchema pcols = HCatInputFormat.getPartitionColumns(myJob.getConfiguration());
+
+    Assert.assertTrue(pcols.getFields() != null);
+    Assert.assertEquals(pcols.getFields().size(), 2);
+    Assert.assertTrue(pcols.getFields().get(0).getName().equals("x"));
+    Assert.assertTrue(pcols.getFields().get(1).getName().equals("y"));
+    Assert.assertTrue(pcols.getFields().get(0).getType().equals(HCatFieldSchema.Type.STRING));
+    Assert.assertTrue(pcols.getFields().get(1).getType().equals(HCatFieldSchema.Type.STRING));
+
+  }
+
+}
+
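
For context, here is a minimal usage sketch of the two new accessors from a driver program. It is not part of the patch: the class name SchemaProbe is made up for illustration, and the database/table names simply mirror the table created by the test above.

// Illustrative only: configure a job against an HCatalog table, then read the
// table schema back out of the job configuration via the new static accessors.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

public class SchemaProbe {
  public static void main(String[] args) throws Exception {
    Job job = new Job(new Configuration(), "schema-probe");

    // setInput serializes the InputJobInfo into the job configuration under
    // HCatConstants.HCAT_KEY_JOB_INFO; the accessors below deserialize it again.
    HCatInputFormat.setInput(job, "default", "testHCIFMethods");

    // Non-partition columns of the table (a string, b int in the test table).
    HCatSchema dataColumns = HCatInputFormat.getDataColumns(job.getConfiguration());
    for (HCatFieldSchema field : dataColumns.getFields()) {
      System.out.println("data column: " + field.getName() + " : " + field.getTypeString());
    }

    // Partition key columns of the table (x string, y string in the test table).
    HCatSchema partitionColumns = HCatInputFormat.getPartitionColumns(job.getConfiguration());
    for (HCatFieldSchema field : partitionColumns.getFields()) {
      System.out.println("partition column: " + field.getName() + " : " + field.getTypeString());
    }
  }
}

Calling either accessor before HCatInputFormat.setInput has populated the configuration fails the Preconditions.checkNotNull check, which is the behaviour the new test exercises indirectly by always calling setInput first.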