 .../hadoop/hive/serde2/avro/AvroDeserializer.java |  5 ++++-
 .../hive/serde2/avro/AvroLazyObjectInspector.java | 25 ++++++----------------
 .../apache/hadoop/hive/serde2/avro/AvroSerDe.java |  8 ++-----
 .../hadoop/hive/serde2/avro/InstanceCache.java    |  6 +++---
 4 files changed, 16 insertions(+), 28 deletions(-)

diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index ecfe15f..6645f09 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -163,7 +163,7 @@ public Object deserialize(List columnNames, List columnTypes,
         reEncoder = new SchemaReEncoder(r.getSchema(), readerSchema);
         reEncoderCache.put(recordReaderId, reEncoder);
       } else{
-        LOG.debug("Adding new valid RRID :" + recordReaderId);
+        LOG.debug("Adding new valid RRID :{}", recordReaderId);
         noEncodingNeeded.add(recordReaderId);
       }
       if(reEncoder != null) {
@@ -340,6 +340,9 @@ private Object deserializeSingleItemNullableUnion(Object datum,
       tag = GenericData.get().resolveUnion(fileSchema, datum);
       currentFileSchema = fileSchema.getTypes().get(tag);
     } catch (UnresolvedUnionException e) {
+      // TODO: This is a lot of code for a debug block. If this is
+      // important, is it being unit tested? If not, remove it or put it to
+      // WARN level logging
       if (LOG.isDebugEnabled()) {
         String datumClazz = null;
         if (datum != null) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
index ff8ac36..9f5e3bd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
@@ -125,10 +125,8 @@ public Object getStructFieldData(Object data, StructField f) {
     int fieldID = f.getFieldID();
 
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Getting struct field data for field: [" + f.getFieldName() + "] on data ["
-          + data.getClass() + "]");
-    }
+    LOG.debug("Getting struct field data for field: [{}] on data [{}]", f.getFieldName(),
+        data.getClass());
 
     if (data instanceof LazyStruct) {
       LazyStruct row = (LazyStruct) data;
@@ -137,10 +135,7 @@ public Object getStructFieldData(Object data, StructField f) {
       Object rowField = row.getField(fieldID);
 
       if (rowField instanceof LazyStruct) {
-
-        if (LOG.isDebugEnabled() && rowField != null) {
-          LOG.debug("Deserializing struct [" + rowField.getClass() + "]");
-        }
+        LOG.debug("Deserializing struct [{}]", rowField.getClass());
 
         return deserializeStruct(rowField, f.getFieldName());
 
@@ -158,16 +153,12 @@ public Object getStructFieldData(Object data, StructField f) {
           }
         }
 
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Returning a lazy map for field [" + f.getFieldName() + "]");
-        }
+        LOG.debug("Returning a lazy map for field [{}]", f.getFieldName());
 
         return lazyMap;
       } else {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Returning [" + rowField + "] for field [" + f.getFieldName() + "]");
-        }
+        LOG.debug("Returning [{}] for field [{}]", rowField, f.getFieldName());
 
         // Just return the object. We need no further operation on it
         return rowField;
@@ -263,10 +254,8 @@ private Object deserializeStruct(Object struct, String fieldName) {
     }
 
     // adjust the data bytes according to any possible offset that was provided
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Retrieved writer Schema: " + ws.toString());
-      LOG.debug("Retrieved reader Schema: " + rs.toString());
-    }
+    LOG.debug("Retrieved writer Schema: {}", ws);
+    LOG.debug("Retrieved reader Schema: {}", rs);
 
     try {
       avroWritable.readFields(data, offset, data.length, ws, rs);
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
index 5467d8a..4df5c84 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
@@ -115,9 +115,7 @@ public void initialize(Configuration configuration, Properties properties) throw
       properties.setProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(),
               schema.toString());
     }
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Avro schema is " + schema);
-    }
+    LOG.debug("Avro schema is {}", schema);
 
     if (configuration == null) {
       LOG.debug("Configuration null, not inserting schema");
@@ -154,9 +152,7 @@ public static Schema getSchemaFromCols(Properties properties,
       //in MetaStoreUtils where this string columns.comments is generated
       columnComments = Arrays.asList(columnCommentProperty.split("\0"));
 
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("columnComments is " + columnCommentProperty);
-      }
+      LOG.debug("columnComments is {}", columnCommentProperty);
     }
     if (columnNames.size() != columnTypes.size()) {
       throw new IllegalArgumentException("AvroSerde initialization failed. Number of column " +
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
index 2d52020..1975f0e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java
@@ -53,14 +53,14 @@ public Instance retrieve(SeedObject hv) throws AvroSerdeException {
    */
   public synchronized Instance retrieve(SeedObject hv,
       Set seenSchemas) throws AvroSerdeException {
-    if(LOG.isDebugEnabled()) LOG.debug("Checking for hv: " + hv.toString());
+    LOG.debug("Checking for hv: {}", hv);
 
     if(cache.containsKey(hv)) {
-      if(LOG.isDebugEnabled()) LOG.debug("Returning cache result.");
+      LOG.debug("Returning cache result.");
      return cache.get(hv);
     }
 
-    if(LOG.isDebugEnabled()) LOG.debug("Creating new instance and storing in cache");
+    LOG.debug("Creating new instance and storing in cache");
     Instance instance = makeInstance(hv, seenSchemas);
     cache.put(hv, instance);
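
For context, the pattern applied throughout this patch is SLF4J parameterized logging: the {} placeholders are only formatted when the target level is enabled, so the explicit isDebugEnabled() guards and the string concatenation become unnecessary. A minimal before/after sketch follows; the class name and sample value are illustrative only and are not part of the patch.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical class, used only to illustrate the logging change.
public class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  public static void main(String[] args) {
    int recordReaderId = 7; // illustrative value

    // Before: the message is concatenated eagerly, so callers wrap the call
    // in a guard to avoid the formatting cost when DEBUG is off.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Adding new valid RRID :" + recordReaderId);
    }

    // After: SLF4J substitutes {} only if DEBUG is enabled, so both the
    // guard and the concatenation can be dropped.
    LOG.debug("Adding new valid RRID :{}", recordReaderId);
  }
}

A guard is still worthwhile when building the log arguments themselves is expensive, which is why the larger debug block in deserializeSingleItemNullableUnion keeps its isDebugEnabled() check (flagged with a TODO above) rather than being converted.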