Index: hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
===================================================================
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java (revision 1535192)
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java (working copy)
@@ -22,6 +22,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
@@ -31,6 +33,8 @@
  * HCatRecord is designed only to be used as in-memory representation only.
  * Don't use it to store data on the physical device.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class HCatRecord implements HCatRecordable {
 
   public abstract Object get(String fieldName, HCatSchema recordSchema) throws HCatException;
Index: hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchema.java
===================================================================
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchema.java (revision 1535192)
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchema.java (working copy)
@@ -25,12 +25,15 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hive.hcatalog.common.HCatException;
 
 /**
  * HCatSchema. This class is NOT thread-safe.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HCatSchema implements Serializable {
 
   private static final long serialVersionUID = 1L;
Index: hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatReader.java
===================================================================
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatReader.java (revision 1535192)
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatReader.java (working copy)
@@ -24,6 +24,8 @@
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.transfer.state.StateProvider;
@@ -32,7 +34,8 @@
  * This abstract class is internal to HCatalog and abstracts away the notion of
  * underlying system from which reads will be done.
 */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class HCatReader {
 
   /**
Index: hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatWriter.java
===================================================================
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatWriter.java (revision 1535192)
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/transfer/HCatWriter.java (working copy)
@@ -24,6 +24,8 @@
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.transfer.state.StateProvider;
@@ -33,7 +35,8 @@
 * HCatalog from external systems. Don't try to instantiate this directly.
 * Instead, use {@link DataTransferFactory}
 */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class HCatWriter {
 
   protected Configuration conf;
Index: hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
===================================================================
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java (revision 1535192)
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java (working copy)
@@ -28,6 +28,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -52,6 +54,8 @@
 
 /** The OutputFormat to use to write data to HCatalog. The key value is ignored and
  *  should be given as null. The value is the HCatRecord to write.*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HCatOutputFormat extends HCatBaseOutputFormat {
 
   static final private Logger LOG = LoggerFactory.getLogger(HCatOutputFormat.class);
Index: hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
===================================================================
--- hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java (revision 1535192)
+++ hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java (working copy)
@@ -27,6 +27,8 @@
 import java.util.Properties;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -49,7 +51,8 @@
 /**
  * Pig {@link org.apache.pig.LoadFunc} to read data from HCat
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HCatLoader extends HCatBaseLoader {
 
   private static final String PARTITION_FILTER = "partition.filter"; // for future use
Index: hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java
===================================================================
--- hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java (revision 1535192)
+++ hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java (working copy)
@@ -27,6 +27,8 @@
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -48,7 +50,8 @@
 /**
  * HCatStorer.
  *
 */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HCatStorer extends HCatBaseStorer {
 
   // Signature for wrapped storer, see comments in LoadFuncBasedInputDriver.initialize
Index: hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
===================================================================
--- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java (revision 1535192)
+++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java (working copy)
@@ -28,6 +28,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.web.AuthFilter;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -45,6 +47,8 @@
 /**
  * The main executable that starts up and runs the Server.
  */
+@InterfaceAudience.LimitedPrivate("Integration Tests")
+@InterfaceStability.Unstable
 public class Main {
   public static final String SERVLET_PATH = "templeton";
   private static final Log LOG = LogFactory.getLog(Main.class);
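
Illustrative note, not part of the patch: every hunk above applies the same Hive interface-classification idiom, so the minimal sketch below shows what an annotated class ends up looking like. MyHCatExtension and InternalHook are hypothetical names used only for illustration; the annotation classes and values themselves are the ones the patch imports and applies.

// Minimal sketch of the annotation pattern applied by this patch.
// MyHCatExtension and InternalHook are hypothetical, for illustration only.
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;

/**
 * Public, evolving API: external code may depend on it, but its signature may
 * still change between releases. This is the classification the patch gives
 * HCatRecord, HCatSchema, HCatReader, HCatWriter, HCatOutputFormat,
 * HCatLoader, and HCatStorer.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MyHCatExtension {

  /**
   * Visible only to a named consumer and carrying no stability guarantee.
   * This mirrors the classification the patch gives the WebHCat templeton
   * Main class.
   */
  @InterfaceAudience.LimitedPrivate("Integration Tests")
  @InterfaceStability.Unstable
  public static class InternalHook {
  }
}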