Index: src/java/org/apache/hcatalog/data/schema/HCatSchema.java =================================================================== --- src/java/org/apache/hcatalog/data/schema/HCatSchema.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/schema/HCatSchema.java (working copy) @@ -27,7 +27,7 @@ import org.apache.hcatalog.common.HCatException; /** - * HCatSchema. This class is NOT thread-safe. + * The HCatalog schema class. This class is NOT thread-safe. */ public class HCatSchema implements Serializable{ @@ -39,10 +39,14 @@ private final List fieldNames; /** + * Instantiate an HCatSchema with a list of field schemas. * - * @param fieldSchemas is now owned by HCatSchema. Any subsequent modifications - * on fieldSchemas won't get reflected in HCatSchema. Each fieldSchema's name - * in the list must be unique, otherwise throws IllegalArgumentException. + * @param fieldSchemas + * is now owned by HCatSchema. Any subsequent modifications + * on fieldSchemas won't get reflected in HCatSchema. Each + * fieldSchema's name in the list must be unique, otherwise + * throws IllegalArgumentException. + * @throws IllegalArgumentException */ public HCatSchema(final List fieldSchemas){ this.fieldSchemas = new ArrayList(fieldSchemas); @@ -63,6 +67,12 @@ } } + /** + * + * @param hfs + * an HCatalog field schema + * @throws HCatException + */ public void append(final HCatFieldSchema hfs) throws HCatException{ if(hfs == null) throw new HCatException("Attempt to append null HCatFieldSchema in HCatSchema."); @@ -80,6 +90,8 @@ /** * Users are not allowed to modify the list directly, since HCatSchema * maintains internal state. Use append/remove to modify the schema. + * + * @return a list of field schemas */ public List getFields(){ return Collections.unmodifiableList(this.fieldSchemas); @@ -87,29 +99,57 @@ /** * @param fieldName - * @return the index of field named fieldName in Schema. If field is not - * present, returns null. 
+ * the field name + * @return the index of a field named fieldName in Schema. If the field + * is not present, returns null. */ public Integer getPosition(String fieldName) { return fieldPositionMap.get(fieldName); } + /** + * + * @param fieldName + * the field name + * @return the schema for the specified field + * @throws HCatException + */ public HCatFieldSchema get(String fieldName) throws HCatException { return get(getPosition(fieldName)); } + /** + * + * @return a list of field names + */ public List getFieldNames(){ return this.fieldNames; } + /** + * + * @param position + * the field location + * @return the schema for the specified position + */ public HCatFieldSchema get(int position) { return fieldSchemas.get(position); } + /** + * + * @return the size + */ public int size(){ return fieldSchemas.size(); } + /** + * + * @param hcatFieldSchema + * the field schema to remove + * @throws HCatException + */ public void remove(final HCatFieldSchema hcatFieldSchema) throws HCatException { if(!fieldSchemas.contains(hcatFieldSchema)){ @@ -140,6 +180,10 @@ return sb.toString(); } + /** + * + * @return the schema as a String + */ public String getSchemaAsTypeString(){ boolean first = true; StringBuilder sb = new StringBuilder(); Index: src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java =================================================================== --- src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/schema/HCatSchemaUtils.java (working copy) @@ -102,8 +102,10 @@ /** - * Convert a HCatFieldSchema to a FieldSchema - * @param fs FieldSchema to convert + * Convert a FieldSchema to an HCatFieldSchema. 
+ * + * @param fs + * FieldSchema to convert * @return HCatFieldSchema representation of FieldSchema * @throws HCatException */ Index: src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java =================================================================== --- src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/schema/HCatFieldSchema.java (working copy) @@ -23,27 +23,111 @@ public class HCatFieldSchema implements Serializable { + /** + * Enum representing the various HCatalog field types. + */ public enum Type { + /** + * The INT datatype uses 4 bytes for integers in the range -2^31 to 2^31-1. + */ INT, + + /** + * The TINYINT datatype uses 1 byte for signed integers in the range -128 to 127. + */ TINYINT, + + /** + * The SMALLINT datatype uses 2 bytes for integers in the range -2^15 to 2^15-1. + */ SMALLINT, + + /** + * The BIGINT datatype uses 8 bytes for integers in the range -2^63 to 2^63-1. + */ BIGINT, + + /** + * The BOOLEAN datatype uses a single bit to represent the logical values + * true (1) and false (0). + */ BOOLEAN, + + /** + * The FLOAT datatype is a single-precision floating point value + * that uses 32 bits of storage. + */ FLOAT, + + /** + * The DOUBLE datatype is a double-precision floating point value + * that uses 64 bits of storage. + */ DOUBLE, + + /** + * The STRING datatype is a character string. + */ STRING, + + /** + * The ARRAY datatype holds values of a single type. + */ ARRAY, + + /** + * The MAP datatype is a list of key-value pairs. + */ MAP, + + /** + * The STRUCT datatype is a structured type. + */ STRUCT, + + /** + * The BINARY datatype holds binary data. + */ BINARY, } + /** + * Enum representing various categories for the HCatalog field types. + * The PRIMITIVE category includes all types except ARRAY, MAP, and STRUCT. 
+ */ public enum Category { + + /** + * Enum constant PRIMITIVE specifies the category for all primitive datatypes + * (INT, TINYINT, SMALLINT, BIGINT, BOOLEAN, FLOAT, DOUBLE, and STRING). + */ PRIMITIVE, + + /** + * Enum constant ARRAY specifies the category for the ARRAY datatype, + * which holds elements that are all of the same type. + */ ARRAY, + + /** + * Enum constant MAP specifies the category for the MAP datatype, + * which holds key-value pairs. + */ MAP, + + /** + * Enum constant STRUCT specifies the category for the STRUCT datatype, + * which holds structured data. + */ STRUCT; + /** + * Returns the category for a given field type. + * + * @param type + * datatype for the field + * @return the corresponding category (ARRAY, STRUCT, MAP, or PRIMITIVE) + */ public static Category fromType(Type type) { if (Type.ARRAY == type){ return ARRAY; @@ -57,6 +141,12 @@ } }; + /** + * Returns true for a complex field category + * (ARRAY, STRUCT, or MAP) or false for a primitive category. + * + * @return false if category is PRIMITIVE; otherwise true + */ public boolean isComplex(){ return (category == Category.PRIMITIVE) ? false : true; } @@ -88,7 +178,8 @@ } /** - * Returns type of the field + * Gets the datatype of the field. + * * @return type of the field */ public Type getType(){ @@ -96,7 +187,8 @@ } /** - * Returns category of the field + * Gets the category of the field. + * * @return category of the field */ public Category getCategory(){ @@ -104,22 +196,35 @@ } /** - * Returns name of the field + * Gets the name of the field. + * * @return name of the field */ public String getName(){ return fieldName; } + /** + * Gets the comment associated with the field. 
+ * + * @return comment of the field + */ public String getComment(){ return comment; } /** - * Constructor constructing a primitive datatype HCatFieldSchema - * @param fieldName Name of the primitive field - * @param type Type of the primitive field - * @throws HCatException if call made on non-primitive types + * Constructor for constructing a primitive datatype HCatFieldSchema, + * passing a primitive field type and a comment. + * + * @param fieldName + * name of the primitive field + * @param type + * type of the primitive field + * @param comment + * a comment associated with the field + * @throws HCatException + * if call made on non-primitive types */ public HCatFieldSchema(String fieldName, Type type, String comment) throws HCatException { assertTypeInCategory(type,Category.PRIMITIVE,fieldName); @@ -130,13 +235,22 @@ } /** - * Constructor for constructing a ARRAY type or STRUCT type HCatFieldSchema, passing type and subschema - * @param fieldName Name of the array or struct field - * @param type Type of the field - either Type.ARRAY or Type.STRUCT - * @param subSchema - subschema of the struct, or element schema of the elements in the array - * @throws HCatException if call made on Primitive or Map types + * Constructor for constructing an ARRAY type or STRUCT type + * HCatFieldSchema, passing the field type, subschema, and a comment. 
+ * + * @param fieldName + * name of the array or struct field + * @param type + * type of the field - either Type.ARRAY or Type.STRUCT + * @param subSchema + * subschema of the struct, or element schema of the elements + * in the array + * @param comment + * a comment + * @throws HCatException + * if call made on Primitive or Map type */ - public HCatFieldSchema(String fieldName, Type type, HCatSchema subSchema,String comment) throws HCatException{ + public HCatFieldSchema(String fieldName, Type type, HCatSchema subSchema, String comment) throws HCatException{ assertTypeNotInCategory(type,Category.PRIMITIVE); assertTypeNotInCategory(type,Category.MAP); this.fieldName = fieldName; @@ -154,12 +268,19 @@ } /** - * Constructor for constructing a MAP type HCatFieldSchema, passing type of key and value - * @param fieldName Name of the array or struct field - * @param type Type of the field - must be Type.MAP - * @param mapKeyType - key type of the Map - * @param mapValueSchema - subschema of the value of the Map - * @throws HCatException if call made on non-Map types + * Constructor for constructing a MAP type HCatFieldSchema, passing the + * field type (Type.MAP), key type, value schema, and a comment. + * + * @param fieldName + * name of the array or struct field + * @param type + * type of the field - must be Type.MAP + * @param mapKeyType + * key type of the Map + * @param mapValueSchema + * subschema of the value of the Map + * @throws HCatException + * if call made on non-Map types */ public HCatFieldSchema(String fieldName, Type type, Type mapKeyType, HCatSchema mapValueSchema, String comment) throws HCatException{ assertTypeInCategory(type,Category.MAP, fieldName); @@ -173,21 +294,45 @@ this.comment = comment; } + /** + * Gets the struct subschema. 
+ * + * @return the subschema of the struct + * @throws HCatException + */ public HCatSchema getStructSubSchema() throws HCatException { assertTypeInCategory(this.type,Category.STRUCT, this.fieldName); return subSchema; } + /** + * Gets the array element schema. + * + * @return the array element schema + * @throws HCatException + */ public HCatSchema getArrayElementSchema() throws HCatException { assertTypeInCategory(this.type,Category.ARRAY, this.fieldName); return subSchema; } + /** + * Gets the key type of the Map. + * + * @return the key type of the Map + * @throws HCatException + */ public Type getMapKeyType() throws HCatException { assertTypeInCategory(this.type,Category.MAP, this.fieldName); return mapKeyType; } + /** + * Gets the subschema of the Map value. + * + * @return the subschema of the value of the Map + * @throws HCatException + */ public HCatSchema getMapValueSchema() throws HCatException { assertTypeInCategory(this.type,Category.MAP, this.fieldName); return subSchema; @@ -216,6 +361,11 @@ + (category != null ? "category=" + category : "category=null") + "]"; } + /** + * Gets the type as a string. + * + * @return a string for the type + */ public String getTypeString(){ if (typeString != null){ return typeString; @@ -242,6 +392,14 @@ return (typeString = sb.toString().toLowerCase()); } + /** + * Performs an equality comparison between this HCatFieldSchema and a + * specified object. 
+ * + * @param obj + * an Object + * @return true if this HCatFieldSchema equals the object, otherwise false + */ @Override public boolean equals(Object obj) { if (this == obj) { Index: src/java/org/apache/hcatalog/data/HCatRecordable.java =================================================================== --- src/java/org/apache/hcatalog/data/HCatRecordable.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/HCatRecordable.java (working copy) @@ -23,33 +23,41 @@ import org.apache.hcatalog.common.HCatException; /** - * Interface that determines whether we can implement a HCatRecord on top of it + * Interface that determines whether we can implement an HCatRecord + * on top of it. */ public interface HCatRecordable extends Writable { /** - * Gets the field at the specified index. - * @param fieldNum the field number + * Gets the field at the specified index (field number). + * + * @param fieldNum + * the field number * @return the object at the specified index * @throws HCatException */ Object get(int fieldNum); /** - * Gets all the fields of the hcat record. + * Gets all the fields of the HCatRecord. + * * @return the list of fields */ List getAll(); /** * Sets the field at the specified index. - * @param fieldNum the field number - * @param value the value to set + * + * @param fieldNum + * the field number + * @param value + * the value to set */ void set(int fieldNum, Object value); /** - * Gets the size of the hcat record. + * Gets the size of the HCatRecord. 
+ * * @return the size */ int size(); Index: src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java (working copy) @@ -22,15 +22,15 @@ import org.apache.hadoop.mapred.TaskTracker; /** - * If external system wants to communicate any state to slaves, they can do so - * via this interface. One example of this in case of Map-Reduce is ids assigned - * by {@link JobTracker} to {@link TaskTracker} + * If an external system wants to communicate any state to slaves, it can do so + * via this interface. One example of this in the case of Map-Reduce is ids + * assigned by {@link JobTracker} to {@link TaskTracker}. */ public interface StateProvider { /** - * This method should return id assigned to slave node. - * + * This method should return the identifier assigned to a slave node. + * * @return id */ public int getId(); Index: src/java/org/apache/hcatalog/data/transfer/WriterContext.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/WriterContext.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/WriterContext.java (working copy) @@ -27,10 +27,10 @@ import org.apache.hadoop.conf.Configuration; /** - * This contains information obtained at master node to help prepare slave nodes - * for writer. This class implements {@link Externalizable} so it can be - * serialized using standard java mechanisms. Master should serialize it and - * make it available to slaves to prepare for writes. + * This contains information obtained at the master node to help prepare slave + * nodes for writing. This class implements {@link Externalizable} so it can be + * serialized using standard Java mechanisms. 
The master should serialize it + * and make it available to slaves to prepare for writes. */ public class WriterContext implements Externalizable, Configurable { @@ -41,21 +41,50 @@ conf = new Configuration(); } + /** + * Get the configuration used by this writer. + * + * @return the writer's configuration + */ @Override public Configuration getConf() { return conf; } + /** + * Set the configuration to be used by this writer. + * + * @param config + * the configuration to set + */ @Override public void setConf(final Configuration config) { this.conf = config; } + /** + * Write the configuration to an external location. + * + * @param out + * the stream to write the output to + * @throws IOException + * if I/O errors occur + */ @Override public void writeExternal(ObjectOutput out) throws IOException { conf.write(out); } + /** + * Restore the contents of this WriterContext with data read from an + * external location. + * + * @param in + * the stream to read data from in order to restore writer context + * @throws IOException + * if I/O errors occur + * @throws ClassNotFoundException + */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { Index: src/java/org/apache/hcatalog/data/transfer/EntityBase.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/EntityBase.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/EntityBase.java (working copy) @@ -40,18 +40,38 @@ abstract static class Entity extends EntityBase { + /** + * Get the region. + * + * @return the name of the region + */ public String getRegion() { return region; } + /** + * Get the table name. + * + * @return the table name + */ public String getTableName() { return tableName; } + /** + * Get the database name. + * + * @return the database name + */ public String getDbName() { return dbName; } + /** + * Get the partition key values. 
+ * + * @return a Map of partition key values + */ public Map getPartitionKVs() { return partitionKVs; } Index: src/java/org/apache/hcatalog/data/transfer/ReaderContext.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/ReaderContext.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/ReaderContext.java (working copy) @@ -33,7 +33,7 @@ /** * This class will contain information of different {@link InputSplit} obtained * at master node and configuration. This class implements - * {@link Externalizable} so it can be serialized using standard java + * {@link Externalizable} so it can be serialized using standard Java * mechanisms. */ public class ReaderContext implements Externalizable, Configurable { @@ -47,24 +47,54 @@ this.conf = new Configuration(); } + /** + * Set the input splits to be used by this reader. + * + * @param splits + * a List of input splits + */ public void setInputSplits(final List splits) { this.splits = splits; } + /** + * Get the input splits used by this reader. + * + * @return a list of input splits + */ public List getSplits() { return splits; } + /** + * Get the configuration used by this reader. + * + * @return the reader's configuration + */ @Override public Configuration getConf() { return conf; } + /** + * Set the configuration to be used by this reader. + * + * @param config + * the configuration to set + */ @Override public void setConf(final Configuration config) { conf = config; } + /** + * Write the input splits used by this reader to an external location. + * + * @param out + * the stream to write the output to + * @throws IOException + * if I/O errors occur + */ @Override public void writeExternal(ObjectOutput out) throws IOException { conf.write(out); @@ -74,6 +104,16 @@ } } + /** + * Read an external list of input splits to restore the contents of this + * ReaderContext. 
+ * + * @param in + * the stream to read data from in order to restore reader context + * @throws IOException + * if I/O errors occur + * @throws ClassNotFoundException + */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { Index: src/java/org/apache/hcatalog/data/transfer/WriteEntity.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/WriteEntity.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/WriteEntity.java (working copy) @@ -20,13 +20,16 @@ import java.util.Map; +/** + * This class is used on the master node to instantiate an {@link HCatWriter} + * or an HCatOutputFormatWriter. + */ public class WriteEntity extends EntityBase.Entity { /** - * Don't instantiate {@link WriteEntity} directly. Use, {@link Builder} to + * Don't instantiate {@link WriteEntity} directly. Use {@link Builder} to * build {@link WriteEntity}. */ - private WriteEntity() { // Not allowed. } @@ -39,33 +42,59 @@ } /** - * This class should be used to build {@link WriteEntity}. It follows builder - * pattern, letting you build your {@link WriteEntity} with whatever level of - * detail you want. - * + * This class should be used to build a {@link WriteEntity}. It follows + * the builder pattern, letting you build your {@link WriteEntity} with + * whatever level of detail you want. */ public static class Builder extends EntityBase { + /** + * Build a {@link WriteEntity} specifying the region. + * + * @param region + * the region for this {@link WriteEntity} + */ public Builder withRegion(final String region) { this.region = region; return this; } + /** + * Build a {@link WriteEntity} specifying the database. + * + * @param dbName + * the name of the database + */ public Builder withDatabase(final String dbName) { this.dbName = dbName; return this; } + /** + * Build a {@link WriteEntity} specifying the table name. 
+ * + * @param tblName + * the name of the table + */ public Builder withTable(final String tblName) { this.tableName = tblName; return this; } + /** + * Build a {@link WriteEntity} specifying the partition key values. + * + * @param partKVs + * a Map of partition key values + */ public Builder withPartition(final Map partKVs) { this.partitionKVs = partKVs; return this; } + /** + * Build a {@link WriteEntity} without specifying any parameters. + */ public WriteEntity build() { return new WriteEntity(this); } Index: src/java/org/apache/hcatalog/data/transfer/ReadEntity.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/ReadEntity.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/ReadEntity.java (working copy) @@ -20,14 +20,17 @@ import java.util.Map; +/** + * This class is used on the master node to instantiate an {@link HCatReader} + * or an HCatInputFormatReader. + */ public class ReadEntity extends EntityBase.Entity { private String filterString; /** - * Don't instantiate {@link ReadEntity} directly. Use, + * Don't instantiate {@link ReadEntity} directly. Use * {@link ReadEntity.Builder} instead. - * */ private ReadEntity() { // Not allowed @@ -42,45 +45,83 @@ this.filterString = builder.filterString; } + /** + * Get the filter string. + * + * @return the filter string + */ public String getFilterString() { return this.filterString; } /** - * This class should be used to build {@link ReadEntity}. It follows builder - * pattern, letting you build your {@link ReadEntity} with whatever level of - * detail you want. - * + * This class should be used to build a {@link ReadEntity}. It follows the + * builder pattern, letting you build your {@link ReadEntity} with whatever + * level of detail you want. + * */ public static class Builder extends EntityBase { private String filterString; + /** + * Build a {@link ReadEntity} specifying the region. 
+ * + * @param region + * the region for this {@link ReadEntity} + */ public Builder withRegion(final String region) { this.region = region; return this; } + /** + * Build a {@link ReadEntity} specifying the database. + * + * @param dbName + * the name of the database + */ public Builder withDatabase(final String dbName) { this.dbName = dbName; return this; } + /** + * Build a {@link ReadEntity} specifying the table name. + * + * @param tblName + * the name of the table + */ public Builder withTable(final String tblName) { this.tableName = tblName; return this; } + /** + * Build a {@link ReadEntity} specifying the partition key values. + * + * @param partKVs + * a Map of partition key values + */ public Builder withPartition(final Map partKVs) { this.partitionKVs = partKVs; return this; } + /** + * Build a {@link ReadEntity} specifying a filter string. + * + * @param filterString + * the filter for this {@link ReadEntity} + */ public Builder withFilter(String filterString) { this.filterString = filterString; return this; } + /** + * Build a {@link ReadEntity} without specifying any parameters. + */ public ReadEntity build() { return new ReadEntity(this); } Index: src/java/org/apache/hcatalog/data/transfer/HCatWriter.java =================================================================== --- src/java/org/apache/hcatalog/data/transfer/HCatWriter.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/transfer/HCatWriter.java (working copy) @@ -30,7 +30,7 @@ /** * This abstraction is internal to HCatalog. This is to facilitate writing to * HCatalog from external systems. Don't try to instantiate this directly. - * Instead, use {@link DataTransferFactory} + * Instead, use {@link DataTransferFactory}. */ public abstract class HCatWriter { @@ -50,11 +50,11 @@ public abstract WriterContext prepareWrite() throws HCatException; /** - * This method should be used at slave needs to perform writes. + * This method should be used at slave nodes to perform writes. 
* * @param recordItr * {@link Iterator} records to be written into HCatalog. - * @throws {@link HCatException} + * @throws HCatException */ public abstract void write(final Iterator recordItr) throws HCatException; @@ -63,7 +63,7 @@ * This method should be called at master node. Primary purpose of this is to * do metadata commit. * - * @throws {@link HCatException} + * @throws HCatException */ public abstract void commit(final WriterContext context) throws HCatException; @@ -71,12 +71,12 @@ * This method should be called at master node. Primary purpose of this is to * do cleanups in case of failures. * - * @throws {@link HCatException} * + * @throws HCatException */ public abstract void abort(final WriterContext context) throws HCatException; /** - * This constructor will be used at master node + * This constructor will be used at master node. * * @param we * WriteEntity defines where in storage records should be written to. @@ -93,6 +93,10 @@ * This constructor will be used at slave nodes. * * @param config + * Any configuration which external system wants to communicate + * to slave nodes. + * @param sp + * {@link StateProvider} for node identification */ protected HCatWriter(final Configuration config, final StateProvider sp) { this.conf = config; Index: src/java/org/apache/hcatalog/data/HCatRecord.java =================================================================== --- src/java/org/apache/hcatalog/data/HCatRecord.java (revision 1356106) +++ src/java/org/apache/hcatalog/data/HCatRecord.java (working copy) @@ -27,110 +27,393 @@ /** * Abstract class exposing get and set semantics for basic record usage. - * Note : - * HCatRecord is designed only to be used as in-memory representation only. + * Note: + * HCatRecord is designed to be used as in-memory representation only. * Don't use it to store data on the physical device. */ public abstract class HCatRecord implements HCatRecordable { + /** + * Abstract method to get the value of the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return the value of fieldName + * @throws HCatException + */ public abstract Object get(String fieldName, HCatSchema recordSchema) throws HCatException; + + /** + * Abstract method to set the value of a named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value the value of the field + * @throws HCatException + */ public abstract void set(String fieldName, HCatSchema recordSchema, Object value ) throws HCatException; + + /** + * Abstract method to remove the object at the indicated position in the + * record. + * + * @param idx + * the index + * @throws HCatException + */ public abstract void remove(int idx) throws HCatException; + + /** + * Abstract method to copy an HCatRecord. After this call will copy the + * pointers to the data, not the data itself. Note that this call will + * obliterate anything currently in this record. + * + * @param r + * an HCatalog record + * @throws HCatException + */ public abstract void copy(HCatRecord r) throws HCatException; + /** + * Protected method to get the value for a specified field name. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param clazz + * the field's datatype class + * @return an object containing the fieldName and recordSchema + * @throws HCatException + */ protected Object get(String fieldName, HCatSchema recordSchema, Class clazz) throws HCatException{ // TODO : if needed, verify that recordschema entry for fieldname matches appropriate type. return get(fieldName,recordSchema); } + /** + * Get a Boolean value from the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Boolean value (true or false) + * @throws HCatException + */ public Boolean getBoolean(String fieldName, HCatSchema recordSchema) throws HCatException { return (Boolean) get(fieldName, recordSchema, Boolean.class); } + /** + * Set a Boolean value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Boolean value (true or false) + * @throws HCatException + */ public void setBoolean(String fieldName, HCatSchema recordSchema, Boolean value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a Byte value (TINYINT) from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Byte value + * @throws HCatException + */ public Byte getByte(String fieldName, HCatSchema recordSchema) throws HCatException { //TINYINT return (Byte) get(fieldName, recordSchema, Byte.class); } + /** + * Set a Byte value (TINYINT) for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Byte value + * @throws HCatException + */ public void setByte(String fieldName, HCatSchema recordSchema, Byte value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a Short value (SMALLINT) from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Short value + * @throws HCatException + */ public Short getShort(String fieldName, HCatSchema recordSchema) throws HCatException { // SMALLINT return (Short) get(fieldName, recordSchema, Short.class); } + /** + * Set a Short value (SMALLINT) for the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Short value + * @throws HCatException + */ public void setShort(String fieldName, HCatSchema recordSchema, Short value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get an Integer value from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return an Integer value + * @throws HCatException + */ public Integer getInteger(String fieldName, HCatSchema recordSchema) throws HCatException { return (Integer) get(fieldName,recordSchema, Integer.class); } + /** + * Set an Integer value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * an Integer value + * @throws HCatException + */ public void setInteger(String fieldName, HCatSchema recordSchema, Integer value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a Long value (BIGINT) from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Long value + * @throws HCatException + */ public Long getLong(String fieldName, HCatSchema recordSchema) throws HCatException { // BIGINT return (Long) get(fieldName,recordSchema,Long.class); } + /** + * Set a Long value (BIGINT) for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Long value + * @throws HCatException + */ public void setLong(String fieldName, HCatSchema recordSchema, Long value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a floating-point value from the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Float value + * @throws HCatException + */ public Float getFloat(String fieldName, HCatSchema recordSchema) throws HCatException { return (Float) get(fieldName,recordSchema,Float.class); } + /** + * Set a floating-point value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Float value + * @throws HCatException + */ public void setFloat(String fieldName, HCatSchema recordSchema, Float value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a double-precision floating-point value from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Double value + * @throws HCatException + */ public Double getDouble(String fieldName, HCatSchema recordSchema) throws HCatException { return (Double) get(fieldName,recordSchema,Double.class); } + /** + * Set a double-precision floating-point value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Double value + * @throws HCatException + */ public void setDouble(String fieldName, HCatSchema recordSchema, Double value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a String value from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a String + * @throws HCatException + */ public String getString(String fieldName, HCatSchema recordSchema) throws HCatException { return (String) get(fieldName,recordSchema,String.class); } + /** + * Set a String value for the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a String + * @throws HCatException + */ public void setString(String fieldName, HCatSchema recordSchema, String value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a Struct value from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return the struct value as a List + * @throws HCatException + */ @SuppressWarnings("unchecked") public List getStruct(String fieldName, HCatSchema recordSchema) throws HCatException { return (List) get(fieldName,recordSchema,List.class); } + /** + * Set a Struct value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * the struct value as a List + * @throws HCatException + */ public void setStruct(String fieldName, HCatSchema recordSchema, List value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a List value from the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a List + * @throws HCatException + */ public List getList(String fieldName, HCatSchema recordSchema) throws HCatException { return (List) get(fieldName,recordSchema,List.class); } + /** + * Set a List value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a List + * @throws HCatException + */ public void setList(String fieldName, HCatSchema recordSchema, List value) throws HCatException { set(fieldName,recordSchema,value); } + /** + * Get a Map value from the named field. 
+ * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @return a Map + * @throws HCatException + */ public Map getMap(String fieldName, HCatSchema recordSchema) throws HCatException { return (Map) get(fieldName,recordSchema,Map.class); } + /** + * Set a Map value for the named field. + * + * @param fieldName + * the name of the field + * @param recordSchema + * the record schema + * @param value + * a Map + * @throws HCatException + */ public void setMap(String fieldName, HCatSchema recordSchema, Map value) throws HCatException { set(fieldName,recordSchema,value); }