Index: src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/HCatBaseInputFormat.java (working copy)
@@ -27,6 +27,9 @@
 import java.util.List;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -62,6 +65,8 @@
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class HCatBaseInputFormat extends InputFormat<WritableComparable, HCatRecord> {
Index: src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java (working copy)
@@ -17,6 +17,8 @@
  */
 package org.apache.hcatalog.mapreduce;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 
 import java.io.Serializable;
@@ -26,6 +28,8 @@
 import java.util.Properties;
 
 /** The class used to serialize and store the information read from the metadata server */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InputJobInfo implements Serializable{
 
   /** The serialization version */
Index: src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java (working copy)
@@ -22,6 +22,9 @@
 import java.util.Map.Entry;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Writable;
@@ -49,6 +52,8 @@
  * the underlying record reader is done with the underlying split,
  * not with HCatSplit.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 class HCatRecordReader extends RecordReader<WritableComparable, HCatRecord> {
 
   Log LOG = LogFactory.getLog(HCatRecordReader.class);
Index: src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java (working copy)
@@ -23,6 +23,9 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -39,6 +42,8 @@
 import org.apache.hcatalog.data.HCatRecord;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class HCatBaseOutputFormat extends OutputFormat<WritableComparable<?>, HCatRecord> {
 
 // static final private Log LOG = LogFactory.getLog(HCatBaseOutputFormat.class);
Index: src/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java (working copy)
@@ -25,10 +25,15 @@
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+
 import org.apache.hcatalog.data.schema.HCatSchema;
 
 /** The class used to serialize and store the output related information */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class OutputJobInfo implements Serializable {
 
   /** The db and table names. */
Index: src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java (revision 1304179)
+++ src/java/org/apache/hcatalog/mapreduce/HCatInputFormat.java (working copy)
@@ -20,9 +20,14 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.mapreduce.Job;
 
 /** The InputFormat to use to read data from HCat */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HCatInputFormat extends HCatBaseInputFormat {
 
   /**
Index: src/java/org/apache/hcatalog/common/ErrorType.java
===================================================================
--- src/java/org/apache/hcatalog/common/ErrorType.java (revision 1304179)
+++ src/java/org/apache/hcatalog/common/ErrorType.java (working copy)
@@ -17,9 +17,14 @@
  */
 package org.apache.hcatalog.common;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 /**
  * Enum type representing the various errors throws by HCat.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public enum ErrorType {
 
   /* HCat Input Format related errors 1000 - 1999 */
Index: src/java/org/apache/hcatalog/common/HCatException.java
===================================================================
--- src/java/org/apache/hcatalog/common/HCatException.java (revision 1304179)
+++ src/java/org/apache/hcatalog/common/HCatException.java (working copy)
@@ -17,11 +17,16 @@
  */
 package org.apache.hcatalog.common;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import java.io.IOException;
 
 /**
  * Class representing exceptions thrown by HCat.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HCatException extends IOException {
 
   private static final long serialVersionUID = 1L;
Index: src/java/org/apache/hcatalog/data/schema/HCatSchema.java
===================================================================
--- src/java/org/apache/hcatalog/data/schema/HCatSchema.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/schema/HCatSchema.java (working copy)
@@ -24,12 +24,16 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hcatalog.common.HCatException;
 
 /**
  * HCatSchema. This class is NOT thread-safe.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HCatSchema implements Serializable{
 
   private static final long serialVersionUID = 1L;
Index: src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java
===================================================================
--- src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java (working copy)
@@ -19,8 +19,13 @@
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hcatalog.common.HCatException;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HCatFieldSchema implements Serializable {
 
   public enum Type {
Index: src/java/org/apache/hcatalog/data/HCatRecordable.java
===================================================================
--- src/java/org/apache/hcatalog/data/HCatRecordable.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/HCatRecordable.java (working copy)
@@ -19,12 +19,17 @@
 
 import java.util.List;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.io.Writable;
 import org.apache.hcatalog.common.HCatException;
 
 /**
  * Interface that determines whether we can implement a HCatRecord on top of it
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface HCatRecordable extends Writable {
 
   /**
Index: src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java (working copy)
@@ -18,6 +18,9 @@
 
 package org.apache.hcatalog.data.transfer.state;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.mapred.JobTracker;
 import org.apache.hadoop.mapred.TaskTracker;
 
@@ -25,6 +28,8 @@
  * One example of this in case of Map-Reduce is ids assigned by {@link JobTracker} to
  * {@link TaskTracker}
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface StateProvider {
 
   /** This method should return id assigned to slave node.
Index: src/java/org/apache/hcatalog/data/transfer/WriterContext.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/WriterContext.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/WriterContext.java (working copy)
@@ -23,6 +23,9 @@
 import java.io.ObjectInput;
 import java.io.ObjectOutput;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -31,6 +34,8 @@
  * standard java mechanisms. Master should serialize it and make it available to slaves to
 * prepare for writes.
 */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class WriterContext implements Externalizable, Configurable{
 
   private static final long serialVersionUID = -5899374262971611840L;
Index: src/java/org/apache/hcatalog/data/transfer/HCatReader.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/HCatReader.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/HCatReader.java (working copy)
@@ -22,6 +22,9 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.HCatRecord;
@@ -30,7 +33,8 @@
 /** This abstract class is internal to HCatalog and abstracts away the notion of
  * underlying system from which reads will be done.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class HCatReader{
 
   /** This should be called at master node to obtain {@link ReaderContext} which then should be
Index: src/java/org/apache/hcatalog/data/transfer/DataTransferFactory.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/DataTransferFactory.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/DataTransferFactory.java (working copy)
@@ -20,6 +20,9 @@
 
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hcatalog.data.transfer.impl.HCatInputFormatReader;
@@ -29,7 +32,8 @@
 /** Use this factory to get instances of {@link HCatReader} or {@link HCatWriter}
  * at master and slave nodes.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class DataTransferFactory {
 
   /**
Index: src/java/org/apache/hcatalog/data/transfer/EntityBase.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/EntityBase.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/EntityBase.java (working copy)
@@ -18,12 +18,16 @@
 
 package org.apache.hcatalog.data.transfer;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import java.util.Map;
 
 /** This is a base class for {@link ReadEntity.Builder} / {@link WriteEntity.Builder}. Many fields in them are common,
  * so this class contains the common fields.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 abstract class EntityBase {
 
   String region;
Index: src/java/org/apache/hcatalog/data/transfer/ReaderContext.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/ReaderContext.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/ReaderContext.java (working copy)
@@ -25,6 +25,9 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -34,6 +37,8 @@
  * and configuration. This class implements {@link Externalizable} so it can be serialized using
 * standard java mechanisms.
 */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ReaderContext implements Externalizable, Configurable {
 
   private static final long serialVersionUID = -2656468331739574367L;
Index: src/java/org/apache/hcatalog/data/transfer/WriteEntity.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/WriteEntity.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/WriteEntity.java (working copy)
@@ -18,8 +18,13 @@
 
 package org.apache.hcatalog.data.transfer;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import java.util.Map;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class WriteEntity extends EntityBase.Entity{
 
   /** Don't instantiate {@link WriteEntity} directly. Use, {@link Builder} to build
Index: src/java/org/apache/hcatalog/data/transfer/ReadEntity.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/ReadEntity.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/ReadEntity.java (working copy)
@@ -18,8 +18,13 @@
 
 package org.apache.hcatalog.data.transfer;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import java.util.Map;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ReadEntity extends EntityBase.Entity{
 
   private String filterString;
@@ -82,4 +87,4 @@
       return new ReadEntity(this);
     }
   }
-}
\ No newline at end of file
+}
Index: src/java/org/apache/hcatalog/data/transfer/HCatWriter.java
===================================================================
--- src/java/org/apache/hcatalog/data/transfer/HCatWriter.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/transfer/HCatWriter.java (working copy)
@@ -22,6 +22,9 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.HCatRecord;
@@ -30,7 +33,8 @@
 /** This abstraction is internal to HCatalog. This is to facilitate writing to HCatalog from external
  * systems. Don't try to instantiate this directly. Instead, use {@link DataTransferFactory}
 */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class HCatWriter {
 
   protected Configuration conf;
Index: src/java/org/apache/hcatalog/data/HCatRecord.java
===================================================================
--- src/java/org/apache/hcatalog/data/HCatRecord.java (revision 1304179)
+++ src/java/org/apache/hcatalog/data/HCatRecord.java (working copy)
@@ -21,6 +21,9 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.data.schema.HCatSchema;
@@ -31,6 +34,8 @@
  * HCatRecord is designed only to be used as in-memory representation only.
 * Don't use it to store data on the physical device.
 */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class HCatRecord implements HCatRecordable {
 
   public abstract Object get(String fieldName, HCatSchema recordSchema) throws HCatException;
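
Note on the annotations used throughout this patch: these are Hive's interface classification annotations (patterned after Hadoop's). @InterfaceAudience records who may depend on a class, and @InterfaceStability records how freely it may change between releases: Stable APIs may only break compatibility at major releases, while Evolving APIs may still break at minor releases. As a minimal sketch of the convention, this is how a hypothetical downstream class would carry the same markings; CustomHCatProcessor and its method are illustrative names only, not part of this patch:

import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;

/**
 * Hypothetical example class (not in this patch), annotated in the same
 * style as the HCatalog data-transfer classes above: any external project
 * may use it (Public), but its signatures may still change at minor
 * releases while it remains Evolving.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class CustomHCatProcessor {

  /** Illustrative member; the class-level annotations are the point here. */
  public void process() {
  }
}

Under this scheme, the stable surface in this patch (e.g. HCatInputFormat, HCatRecord, HCatSchema) is the long-term contract, while the newer data-transfer API (HCatReader, HCatWriter, DataTransferFactory and friends) is published as Evolving so it can still be adjusted.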