diff --git a/serde/if/test/complex.thrift b/serde/if/test/complex.thrift
index 308b64c..79d6ec2 100644
--- a/serde/if/test/complex.thrift
+++ b/serde/if/test/complex.thrift
@@ -24,6 +24,9 @@ struct IntString {
   3: i32 underscore_int;
 }
 
+struct EmptyStruct {
+}
+
 struct Complex {
   1: i32 aint;
   2: string aString;
@@ -31,4 +34,5 @@ struct Complex {
   4: list<string> lString;
   5: list<IntString> lintString;
   6: map<string,string> mStringString;
+  7: EmptyStruct emptyStruct;
 }
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
index f5a2986..d9a7747 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
@@ -29,6 +29,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>
   private List<String> lString; // required
   private List<IntString> lintString; // required
   private Map<String,String> mStringString; // required
+  private EmptyStruct emptyStruct; // required
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -44,7 +46,8 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>
-    M_STRING_STRING((short)6, "mStringString");
+    M_STRING_STRING((short)6, "mStringString"),
+    EMPTY_STRUCT((short)7, "emptyStruct");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -71,6 +74,8 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>
     List<Integer> lint,
     List<String> lString,
     List<IntString> lintString,
-    Map<String,String> mStringString)
+    Map<String,String> mStringString,
+    EmptyStruct emptyStruct)
   {
     this();
     this.aint = aint;
@@ -157,6 +165,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields>
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/EmptyStruct.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/EmptyStruct.java
new file mode 100644
--- /dev/null
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/EmptyStruct.java
+public class EmptyStruct implements org.apache.thrift.TBase<EmptyStruct, EmptyStruct._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("EmptyStruct");
+
+
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(EmptyStruct.class, metaDataMap);
+  }
+
+  public EmptyStruct() {
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public EmptyStruct(EmptyStruct other) {
+  }
+
+  public EmptyStruct deepCopy() {
+    return new EmptyStruct(this);
+  }
+
+  @Override
+  public void clear() {
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof EmptyStruct)
+      return this.equals((EmptyStruct)that);
+    return false;
+  }
+
+  public boolean equals(EmptyStruct that) {
+    if (that == null)
+      return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  public int compareTo(EmptyStruct other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    EmptyStruct typedOther = (EmptyStruct)other;
+
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    org.apache.thrift.protocol.TField field;
+    iprot.readStructBegin();
+    while (true)
+    {
+      field = iprot.readFieldBegin();
+      if (field.type == org.apache.thrift.protocol.TType.STOP) {
+        break;
+      }
+      switch (field.id) {
+        default:
+          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+      }
+      iprot.readFieldEnd();
+    }
+    iprot.readStructEnd();
+    validate();
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    validate();
+
+    oprot.writeStructBegin(STRUCT_DESC);
+    oprot.writeFieldStop();
+    oprot.writeStructEnd();
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("EmptyStruct(");
+    boolean first = true;
+
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+}
+
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
index 6c07ab5..5ac4703 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
@@ -365,7 +365,11 @@ public final class TypeInfoUtils {
             break;
           }
         }
-        Token name = expect("name");
+        Token name = expect("name", ">");
+        if (name.text.equals(">")) {
+          // empty struct
+          break;
+        }
         fieldNames.add(name.text);
         expect(":");
         fieldTypeInfos.add(parseType());
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java
index 5f692fb..bfafda9 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java
@@ -26,6 +26,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
+import org.apache.hadoop.hive.serde2.thrift.test.EmptyStruct;
 import org.apache.hadoop.hive.serde2.thrift.test.IntString;
 
 /**
@@ -49,7 +50,7 @@ public class TestThriftObjectInspectors extends TestCase {
       assertEquals(Category.STRUCT, oi1.getCategory());
       StructObjectInspector soi = (StructObjectInspector) oi1;
       List<? extends StructField> fields = soi.getAllStructFieldRefs();
-      assertEquals(6, fields.size());
+      assertEquals(7, fields.size());
       assertEquals(fields.get(0), soi.getStructFieldRef("aint"));
 
       // null
@@ -68,6 +69,8 @@ public class TestThriftObjectInspectors extends TestCase {
       List<IntString> c4 = new ArrayList<IntString>();
       c.setLintString(c4);
       c.setMStringString(null);
+      EmptyStruct c6 = new EmptyStruct();
+      c.setEmptyStruct(c6);
 
       assertEquals(1, soi.getStructFieldData(c, fields.get(0)));
       assertEquals("test", soi.getStructFieldData(c, fields.get(1)));
@@ -75,8 +78,9 @@ public class TestThriftObjectInspectors extends TestCase {
       assertEquals(c3, soi.getStructFieldData(c, fields.get(3)));
       assertEquals(c4, soi.getStructFieldData(c, fields.get(4)));
       assertNull(soi.getStructFieldData(c, fields.get(5)));
+      assertEquals(c6, soi.getStructFieldData(c, fields.get(6)));
       ArrayList<Object> cfields = new ArrayList<Object>();
-      for (int i = 0; i < 6; i++) {
+      for (int i = 0; i < fields.size(); i++) {
         cfields.add(soi.getStructFieldData(c, fields.get(i)));
       }
       assertEquals(cfields, soi.getStructFieldsDataAsList(c));
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java
index 8aef773..9ea48ce 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
+import org.apache.hadoop.hive.serde2.thrift.test.EmptyStruct;
 import org.apache.hadoop.hive.serde2.thrift.test.IntString;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -125,16 +126,17 @@ public final class CreateSequenceFile {
       islist.add(new IntString(i * i, "" + i * i * i, i));
       HashMap<String, String> hash = new HashMap<String, String>();
       hash.put("key_" + i, "value_" + i);
+      EmptyStruct emptyStruct = new EmptyStruct();
       Complex complex = new Complex(rand.nextInt(), "record_"
-          + (new Integer(i)).toString(), alist, slist, islist, hash);
+          + (new Integer(i)).toString(), alist, slist, islist, hash, emptyStruct);
 
       Writable value = serializer.serialize(complex);
       writer.append(key, value);
     }
 
     // Add an all-null record
-    Complex complex = new Complex(0, null, null, null, null, null);
+    Complex complex = new Complex(0, null, null, null, null, null, null);
     Writable value = serializer.serialize(complex);
     writer.append(key, value);
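
Note: the TypeInfoUtils hunk above is what allows an empty struct type string to parse at all; previously expect("name") rejected the immediate ">". A minimal sketch of the effect, not part of the patch, assuming the patched serde is on the classpath (the driver class name here is hypothetical):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class EmptyStructTypeStringCheck {  // hypothetical driver, for illustration only
  public static void main(String[] args) {
    // With expect("name", ">") the parser treats ">" right after "struct<" as an
    // empty field list instead of throwing, so "struct<>" becomes a struct type
    // with no fields.
    TypeInfo ti = TypeInfoUtils.getTypeInfoFromTypeString("struct<>");
    System.out.println(ti.getCategory()); // expected: STRUCT
  }
}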