diff --git data/files/avro_date.txt data/files/avro_date.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0858896e5e89e042d8327da6c0f051141be2962a
--- /dev/null
+++ data/files/avro_date.txt
@@ -0,0 +1,4 @@
+2012-02-21|foo:1980-12-16,bar:1998-05-07|2011-09-04,2011-09-05
+2014-02-11|baz:1981-12-16|2011-09-05
+1947-02-11|baz:1921-12-16|2011-09-05
+8200-02-11|baz:6981-12-16|1039-09-05
diff --git ql/src/test/queries/clientpositive/avro_date.q ql/src/test/queries/clientpositive/avro_date.q
new file mode 100644
index 0000000000000000000000000000000000000000..996fa47a32ea84fdf70b0a1db08ea857dcdff02d
--- /dev/null
+++ ql/src/test/queries/clientpositive/avro_date.q
@@ -0,0 +1,24 @@
+DROP TABLE avro_date_staging;
+DROP TABLE avro_date;
+DROP TABLE avro_date_casts;
+
+CREATE TABLE avro_date_staging (d date, m1 map<string,date>, l1 array<date>)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging;
+
+CREATE TABLE avro_date (d date, m1 map<string,date>, l1 array<date>)
+  PARTITIONED BY (p1 int, p2 date)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS AVRO;
+
+INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging;
+
+SELECT * FROM avro_date;
+SELECT d, COUNT(d) FROM avro_date GROUP BY d;
+SELECT * FROM avro_date WHERE d!='1947-02-11';
+SELECT * FROM avro_date WHERE d<'2014-12-21';
+SELECT * FROM avro_date WHERE d>'8000-12-01';
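Note on the test data above: the interesting rows are the ones far away from the epoch (8200-02-11 and the 1039-09-05 array element). They round-trip through Avro because the patch encodes a Hive DATE as a day count since 1970-01-01 in an Avro int, using the existing DateWritable helpers that the serializer and deserializer below call. A minimal, self-contained sketch of that round trip (the class name is illustrative; only DateWritable and java.sql.Date come from the patched code paths):

import java.sql.Date;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class EpochDayRoundTrip {
  public static void main(String[] args) {
    Date d = Date.valueOf("8200-02-11");                     // far-future row from avro_date.txt
    int days = DateWritable.dateToDays(d);                   // what AvroSerializer writes as an Avro int
    Date back = new Date(DateWritable.daysToMillis(days));   // what AvroDeserializer reconstructs
    System.out.println(days + " -> " + back);                // prints the day count and 8200-02-11
  }
}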
diff --git ql/src/test/results/clientpositive/avro_date.q.out ql/src/test/results/clientpositive/avro_date.q.out
new file mode 100644
index 0000000000000000000000000000000000000000..276a53c9dce898b6c167238507cb7f46ba4ffa78
--- /dev/null
+++ ql/src/test/results/clientpositive/avro_date.q.out
@@ -0,0 +1,126 @@
+PREHOOK: query: DROP TABLE avro_date_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string,date>, l1 array<date>)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string,date>, l1 array<date>)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string,date>, l1 array<date>)
+  PARTITIONED BY (p1 int, p2 date)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date
+POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string,date>, l1 array<date>)
+  PARTITIONED BY (p1 int, p2 date)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date
+PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date_staging
+PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date_staging
+POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_date
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+1947-02-11	1
+2012-02-21	1
+2014-02-11	1
+8200-02-11	1
+PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
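One reason the range predicates in the expected output above behave just as they would on a text table: the epoch-day encoding is monotonic, so ordering comparisons on the stored int agree with comparisons on the original DATE values. A standalone illustration (not Hive code; the class name is made up):

import java.sql.Date;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class DateOrderingSketch {
  public static void main(String[] args) {
    Date lo = Date.valueOf("1947-02-11");
    Date hi = Date.valueOf("2014-12-21");
    // Both checks print true: encoding dates as day counts preserves their order.
    System.out.println(lo.compareTo(hi) < 0);
    System.out.println(DateWritable.dateToDays(lo) < DateWritable.dateToDays(hi));
  }
}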
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index 2fbe00091f12f90a4d5979ca701b30d78b0f1cb0..29262ba5d74eae9f0fb6ddc3c1ae85662b978a0c 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.rmi.server.UID;
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -45,6 +46,7 @@
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -280,6 +282,12 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema recordSchema,
       str = datum.toString();
       HiveVarchar hvc = new HiveVarchar(str, maxLength);
       return hvc;
+    case DATE:
+      if (recordSchema.getType() != Type.INT) {
+        throw new AvroSerdeException("Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
+      }
+
+      return new Date(DateWritable.daysToMillis((Integer)datum));
     default:
       return datum;
     }
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
index a52b2edf08a5799c280cb18d9e98498dbf8aa763..b93121d617673fa455df70f97eed67254425678c 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
@@ -43,11 +43,13 @@
   public static final String DECIMAL_TYPE_NAME = "decimal";
   public static final String CHAR_TYPE_NAME = "char";
   public static final String VARCHAR_TYPE_NAME = "varchar";
+  public static final String DATE_TYPE_NAME = "date";
   public static final String AVRO_PROP_LOGICAL_TYPE = "logicalType";
   public static final String AVRO_PROP_PRECISION = "precision";
   public static final String AVRO_PROP_SCALE = "scale";
   public static final String AVRO_PROP_MAX_LENGTH = "maxLength";
   public static final String AVRO_STRING_TYPE_NAME = "string";
+  public static final String AVRO_INT_TYPE_NAME = "int";
 
   private ObjectInspector oi;
   private List<String> columnNames;
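The deserializer change above receives the Avro datum as a boxed Integer day count and rejects any writer schema whose type is not INT. A standalone mirror of that branch, with names of my own choosing (the real code throws AvroSerdeException from inside deserializePrimitive):

import java.sql.Date;
import org.apache.avro.Schema;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class DeserializeDateSketch {
  static Date dateFromDatum(Object datum, Schema writerSchema) {
    if (writerSchema.getType() != Schema.Type.INT) {
      throw new IllegalArgumentException("Unexpected Avro schema for Date: " + writerSchema.getType());
    }
    // The datum is the number of days since 1970-01-01; daysToMillis turns it back into a timestamp.
    return new Date(DateWritable.daysToMillis((Integer) datum));
  }

  public static void main(String[] args) {
    int days = DateWritable.dateToDays(Date.valueOf("2012-02-21"));
    System.out.println(dateFromDatum(days, Schema.create(Schema.Type.INT)));  // prints 2012-02-21
  }
}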
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 35a7a0ea286610a0ea8dbd7076c8539c66792df9..c8eac89fca276657dc04840ad993f5cad71661c0 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -33,6 +34,7 @@
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -40,6 +42,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -201,6 +204,9 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fieldOI,
     case VARCHAR:
       HiveVarchar vc = (HiveVarchar)fieldOI.getPrimitiveJavaObject(structFieldData);
       return vc.getValue();
+    case DATE:
+      Date date = ((DateObjectInspector)fieldOI).getPrimitiveJavaObject(structFieldData);
+      return DateWritable.dateToDays(date);
     case UNKNOWN:
       throw new AvroSerdeException("Received UNKNOWN primitive category.");
     case VOID:
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
index 640b645a70f77f57931f4b707cc7d44a95965608..c84b1a0d9846f937ff6958a46a0729395e1bba8a 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
@@ -150,6 +150,11 @@ public static TypeInfo generateTypeInfo(Schema schema) throws AvroSerdeException
       return TypeInfoFactory.getVarcharTypeInfo(maxLength);
     }
 
+    if (type == Schema.Type.INT &&
+        AvroSerDe.DATE_TYPE_NAME.equals(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
+      return TypeInfoFactory.dateTypeInfo;
+    }
+
     return typeInfoCache.retrieve(schema);
   }
 
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
index 8bf014b7ba95860578d430631c97f21567ea516d..8cb2dc3ca9bcdd0f24db2836e5858b680b7c4010 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java
@@ -152,6 +152,11 @@ private Schema createAvroPrimitive(TypeInfo typeInfo) {
           "\"precision\":" + precision + "," +
           "\"scale\":" + scale + "}");
       break;
+    case DATE:
+      schema = AvroSerdeUtils.getSchemaFor("{" +
+          "\"type\":\"" + AvroSerDe.AVRO_INT_TYPE_NAME + "\"," +
+          "\"logicalType\":\"" + AvroSerDe.DATE_TYPE_NAME + "\"}");
+      break;
     case VOID:
       schema = Schema.create(Schema.Type.NULL);
       break;
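Taken together, the serializer and the two schema converters above define the mapping Hive DATE <-> Avro {"type":"int","logicalType":"date"}: TypeInfoToSchema emits the annotated int, and SchemaToTypeInfo recognizes it by the logicalType property before falling through to the plain-int path. A quick standalone check of that detection logic (illustrative only; it uses the Avro API directly rather than the AvroSerDe constants):

import org.apache.avro.Schema;

public class DateSchemaSketch {
  public static void main(String[] args) {
    Schema s = new Schema.Parser().parse("{\"type\":\"int\",\"logicalType\":\"date\"}");
    // Same test SchemaToTypeInfo now performs: an int annotated with logicalType "date" maps to Hive DATE.
    boolean isHiveDate = s.getType() == Schema.Type.INT && "date".equals(s.getProp("logicalType"));
    System.out.println(isHiveDate);  // true
  }
}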
diff --git serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
index da58125a60c4ce6b9281147649894d6308d88906..c6b5cb6eb20bfc9142956e72a2483fea9dfbb9c4 100644
--- serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
+++ serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
@@ -19,9 +19,11 @@
 package org.apache.hadoop.hive.serde2.avro;
 
 import com.google.common.io.Resources;
+
 import org.junit.Assert;
 import org.apache.avro.Schema;
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -44,16 +46,28 @@
 
   private static Logger LOGGER = Logger.getLogger(TestTypeInfoToSchema.class);
   private static final List<String> COLUMN_NAMES = Arrays.asList("testCol");
-  private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo("string");
-  private static final TypeInfo INT = TypeInfoFactory.getPrimitiveTypeInfo("int");
-  private static final TypeInfo BOOLEAN = TypeInfoFactory.getPrimitiveTypeInfo("boolean");
-  private static final TypeInfo LONG = TypeInfoFactory.getPrimitiveTypeInfo("bigint");
-  private static final TypeInfo FLOAT = TypeInfoFactory.getPrimitiveTypeInfo("float");
-  private static final TypeInfo DOUBLE = TypeInfoFactory.getPrimitiveTypeInfo("double");
-  private static final TypeInfo BINARY = TypeInfoFactory.getPrimitiveTypeInfo("binary");
-  private static final TypeInfo BYTE = TypeInfoFactory.getPrimitiveTypeInfo("tinyint");
-  private static final TypeInfo SHORT = TypeInfoFactory.getPrimitiveTypeInfo("smallint");
-  private static final TypeInfo VOID = TypeInfoFactory.getPrimitiveTypeInfo("void");
+  private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.STRING_TYPE_NAME);
+  private static final TypeInfo INT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.INT_TYPE_NAME);
+  private static final TypeInfo BOOLEAN = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BOOLEAN_TYPE_NAME);
+  private static final TypeInfo LONG = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BIGINT_TYPE_NAME);
+  private static final TypeInfo FLOAT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.FLOAT_TYPE_NAME);
+  private static final TypeInfo DOUBLE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.DOUBLE_TYPE_NAME);
+  private static final TypeInfo BINARY = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.BINARY_TYPE_NAME);
+  private static final TypeInfo BYTE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.TINYINT_TYPE_NAME);
+  private static final TypeInfo SHORT = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.SMALLINT_TYPE_NAME);
+  private static final TypeInfo VOID = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.VOID_TYPE_NAME);
+  private static final TypeInfo DATE = TypeInfoFactory.getPrimitiveTypeInfo(
+      serdeConstants.DATE_TYPE_NAME);
   private static final int PRECISION = 4;
   private static final int SCALE = 2;
   private static final TypeInfo DECIMAL = TypeInfoFactory.getPrimitiveTypeInfo(
@@ -229,6 +243,17 @@ public void createAvroVarcharSchema() {
   }
 
   @Test
+  public void createAvroDateSchema() {
+    final String specificSchema = "{" +
+        "\"type\":\"int\"," +
+        "\"logicalType\":\"date\"}";
+    String expectedSchema = genSchema(specificSchema);
+
+    Assert.assertEquals("Test for date in avro schema failed",
+        expectedSchema, getAvroSchemaString(DATE));
+  }
+
+  @Test
   public void createAvroListSchema() {
     ListTypeInfo listTypeInfo = new ListTypeInfo();
     listTypeInfo.setListElementTypeInfo(STRING);
@@ -337,6 +362,7 @@ public void createAvroStructSchema() throws IOException {
     names.add("field11");
     names.add("field12");
     names.add("field13");
+    names.add("field14");
     structTypeInfo.setAllStructFieldNames(names);
     ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
     typeInfos.add(STRING);
@@ -351,6 +377,7 @@
     typeInfos.add(DOUBLE);
     typeInfos.add(BOOLEAN);
     typeInfos.add(DECIMAL);
+    typeInfos.add(DATE);
     typeInfos.add(VOID);
     structTypeInfo.setAllStructFieldTypeInfos(typeInfos);
     LOGGER.info("structTypeInfo is " + structTypeInfo);
diff --git serde/src/test/resources/avro-struct.avsc serde/src/test/resources/avro-struct.avsc
index 007e6c2838bfcc1c345ce3cd590f74e30fb2e8b1..7bfcde9b5fb8c82469a10dca3e6f748391c1d292 100644
--- serde/src/test/resources/avro-struct.avsc
+++ serde/src/test/resources/avro-struct.avsc
@@ -4,7 +4,7 @@
 "namespace":"",
 "doc":"struct<field1:string,field2:char(5),field3:varchar(5),field4:binary,field5:tinyint,field6:smallint,field7:int,field8:bigint,field9:float,field10:double,field11:boolean,
-field12:decimal(4,2),field13:void>",
+field12:decimal(4,2),field13:date,field14:void>",
 "fields":[
 {"name":"field1","type":["null","string"],"doc":"string","default":null},
 {"name":"field2","type":["null",{"type":"string","logicalType":"char","maxLength":5}],"doc":"char(5)","default":null},
@@ -17,8 +17,8 @@ field12:decimal(4,2),field13:void>",
 {"name":"field9","type":["null","float"],"doc":"float","default":null},
 {"name":"field10","type":["null","double"],"doc":"double","default":null},
 {"name":"field11","type":["null","boolean"],"doc":"boolean","default":null},
-{"name":"field12","type":["null",{"type":"bytes","logicalType":"decimal","precision":4,
-"scale":2}],"doc":"decimal(4,2)","default":null},
-{"name":"field13","type":"null","doc":"void","default":null}
+{"name":"field12","type":["null",{"type":"bytes","logicalType":"decimal","precision":4,"scale":2}],"doc":"decimal(4,2)","default":null},
+{"name":"field13","type":["null",{"type":"int","logicalType":"date"}],"doc":"date","default":null},
+{"name":"field14","type":"null","doc":"void","default":null}
 ]
 }
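The avro-struct.avsc expectation wraps every Hive column in a nullable union, so the new DATE column (field13) becomes ["null",{"type":"int","logicalType":"date"}] with a null default. A small sketch of how that union shape looks when built with the Avro API directly (illustrative only; the test itself builds the expectation from JSON strings):

import java.util.Arrays;
import org.apache.avro.Schema;

public class NullableDateFieldSketch {
  public static void main(String[] args) {
    Schema date = new Schema.Parser().parse("{\"type\":\"int\",\"logicalType\":\"date\"}");
    Schema union = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), date));
    System.out.println(union);  // ["null",{"type":"int","logicalType":"date"}]
  }
}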