diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 6f7f1fc..f656fae 100644
--- beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -60,6 +60,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
@@ -1888,7 +1889,7 @@ String dequote(String str) {
 
   static Map map(Object[] obs) {
-    Map m = new HashMap();
+    Map m = new LinkedHashMap();
     for (int i = 0; i < obs.length - 1; i += 2) {
       m.put(obs[i], obs[i + 1]);
     }
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
index 7ac0be6..0b16b83 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
@@ -23,7 +23,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -219,7 +219,7 @@ public static Object serializeField(Object field, ObjectInspector fieldObjectIns
   private static Map serializeMap(Object f, MapObjectInspector moi) throws SerDeException {
     ObjectInspector koi = moi.getMapKeyObjectInspector();
     ObjectInspector voi = moi.getMapValueObjectInspector();
-    Map m = new HashMap();
+    Map m = new LinkedHashMap();
 
     Map readMap = moi.getMap(f);
     if (readMap == null) {
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index d2954e0..cb1c459 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -24,7 +24,7 @@
 import java.io.IOException;
 import java.sql.Date;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -95,7 +95,7 @@ public static Object readDatum(DataInput in) throws IOException {
 
     case DataType.MAP:
       int size = in.readInt();
-      Map m = new HashMap(size);
+      Map m = new LinkedHashMap(size);
       for (int i = 0; i < size; i++) {
         m.put(readDatum(in), readDatum(in));
       }
diff --git hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
index 9fca565..ec620d2 100644
--- hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
+++ hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
@@ -27,6 +27,7 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -353,7 +354,7 @@ private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws HCatExce
       return bagContents;
     case MAP:
       Map pigMap = (Map) pigObj;
-      Map typeMap = new HashMap();
+      Map typeMap = new LinkedHashMap();
       for (Entry entry : pigMap.entrySet()) {
         // the value has a schema and not a FieldSchema
         typeMap.put(
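The hunks above all make the same substitution: LinkedHashMap iterates entries in insertion order, where HashMap iterates in an order determined by key hashing and table size. The BeeLine.map helper is the simplest case; a minimal standalone sketch of the same idiom (the class name is hypothetical, the loop mirrors the hunk above):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PairsToMapDemo {
      // Same idiom as BeeLine.map(Object[]): treat obs[i] as key and
      // obs[i + 1] as value; the i < obs.length - 1 bound silently drops
      // a trailing unpaired element.
      static Map<Object, Object> map(Object[] obs) {
        Map<Object, Object> m = new LinkedHashMap<>();
        for (int i = 0; i < obs.length - 1; i += 2) {
          m.put(obs[i], obs[i + 1]);
        }
        return m;
      }

      public static void main(String[] args) {
        // Prints {b=2, a=1, c=3}: argument order, not hash order.
        System.out.println(map(new Object[] {"b", 2, "a", 1, "c", 3}));
      }
    }
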
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
index 5f7f6e6..2b005c4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
@@ -27,11 +27,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -541,7 +542,7 @@ private UnionType nextUnion(
     }
   }
-  private HashMap nextMap(
+  private Map nextMap(
       ColumnVector vector, int row, MapTypeInfo schema, Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
@@ -552,11 +553,11 @@ private UnionType nextUnion(
     }
     int offset = (int) map.offsets[row];
     TypeInfo keyType = schema.getMapKeyTypeInfo();
     TypeInfo valueType = schema.getMapValueTypeInfo();
-    HashMap result;
-    if (previous == null || previous.getClass() != HashMap.class) {
-      result = new HashMap(length);
+    LinkedHashMap result;
+    if (previous == null || previous.getClass() != LinkedHashMap.class) {
+      result = new LinkedHashMap(length);
     } else {
-      result = (HashMap) previous;
+      result = (LinkedHashMap) previous;
       // I couldn't think of a good way to reuse the keys and value objects
       // without even more allocations, so take the easy and safe approach.
       result.clear();
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
index 9e476fa..36ebf87 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
@@ -40,7 +40,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -379,7 +379,7 @@ public Category getCategory() {
 
   @Override
   public Object create() {
-    return new HashMap();
+    return new LinkedHashMap();
   }
 
   @Override
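The nextMap rewrite keeps the per-row reuse optimization but pins the reused object to exactly LinkedHashMap: a leftover HashMap (or any subclass) from an earlier reader is discarded rather than recycled, so a hash-ordered map can never leak back into results. A standalone sketch of that pattern under illustrative names (reuseOrCreate is not a method in the patch):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ReuseDemo {
      // Reuse `previous` only when it is exactly a LinkedHashMap; otherwise
      // allocate fresh. getClass() rather than instanceof guarantees that a
      // HashMap, or any subclass with different iteration semantics, is
      // never handed back to the caller.
      static Map<Object, Object> reuseOrCreate(Object previous, int length) {
        LinkedHashMap<Object, Object> result;
        if (previous == null || previous.getClass() != LinkedHashMap.class) {
          result = new LinkedHashMap<>(length);
        } else {
          @SuppressWarnings("unchecked")
          LinkedHashMap<Object, Object> reused =
              (LinkedHashMap<Object, Object>) previous;
          reused.clear(); // cheaper than reallocating; entries are re-added per row
          result = reused;
        }
        return result;
      }

      public static void main(String[] args) {
        Map<Object, Object> first = reuseOrCreate(null, 4);
        first.put("k", 1);
        Map<Object, Object> second = reuseOrCreate(first, 4);
        System.out.println(second == first);  // true: same object, cleared
        System.out.println(second.isEmpty()); // true
      }
    }
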
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index 308495a..5b001a0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -19,8 +19,9 @@
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
@@ -534,10 +535,10 @@ static OrcUnion nextUnion(ColumnVector vector,
     }
   }
 
-  static HashMap nextMap(ColumnVector vector,
-                         int row,
-                         TypeDescription schema,
-                         Object previous) {
+  static Map nextMap(ColumnVector vector,
+                     int row,
+                     TypeDescription schema,
+                     Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
@@ -547,11 +548,11 @@ static OrcUnion nextUnion(ColumnVector vector,
     int length = (int) map.lengths[row];
     int offset = (int) map.offsets[row];
     TypeDescription keyType = schema.getChildren().get(0);
     TypeDescription valueType = schema.getChildren().get(1);
-    HashMap result;
-    if (previous == null || previous.getClass() != HashMap.class) {
-      result = new HashMap(length);
+    LinkedHashMap result;
+    if (previous == null || previous.getClass() != LinkedHashMap.class) {
+      result = new LinkedHashMap(length);
     } else {
-      result = (HashMap) previous;
+      result = (LinkedHashMap) previous;
       // I couldn't think of a good way to reuse the keys and value objects
       // without even more allocations, so take the easy and safe approach.
       result.clear();
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
index 3f508bd..dfcb33f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.serde;
 
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -106,27 +105,27 @@ public int getMapSize(final Object data) {
 
   @Override
   public Object create() {
-    Map m = new HashMap();
+    Map m = new LinkedHashMap();
     return m;
   }
 
   @Override
   public Object put(Object map, Object key, Object value) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.put(key, value);
     return m;
   }
 
   @Override
   public Object remove(Object map, Object key) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.remove(key);
     return m;
   }
 
   @Override
   public Object clear(Object map) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.clear();
     return m;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 3e7b3a1..2506172 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -771,7 +771,7 @@ private static ExprNodeConstantDesc toMapConstDesc(ColumnInfo colInfo, ObjectIns
     PrimitiveObjectInspector keyPoi = (PrimitiveObjectInspector)keyOI;
     PrimitiveObjectInspector valuePoi = (PrimitiveObjectInspector)valueOI;
     Map values = (Map)((ConstantObjectInspector) inspector).getWritableConstantValue();
-    Map constant = new HashMap();
+    Map constant = new LinkedHashMap();
     for (Map.Entry e : values.entrySet()) {
       constant.put(keyPoi.getPrimitiveJavaObject(e.getKey()), valuePoi.getPrimitiveJavaObject(e.getValue()));
     }
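In AbstractParquetMapInspector the settable methods now cast to the Map interface instead of the concrete HashMap, so create() is free to hand back a LinkedHashMap without breaking put, remove, or clear. A compressed sketch of why the interface cast is the safer contract (class and method names here are illustrative, not from the patch):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class SettableMapDemo {
      // Mirrors the inspector contract: create() picks the implementation
      // once; the mutators depend only on the Map interface.
      static Object create() {
        return new LinkedHashMap<Object, Object>();
      }

      @SuppressWarnings("unchecked")
      static Object put(Object map, Object key, Object value) {
        Map<Object, Object> m = (Map<Object, Object>) map; // not (HashMap) map
        m.put(key, value);
        return m;
      }

      public static void main(String[] args) {
        Object m = create();
        put(m, "key1", true);
        put(m, "key2", false);
        System.out.println(m); // {key1=true, key2=false}, insertion order
      }
    }
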
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
index 4412425..f257363 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
@@ -23,7 +23,7 @@
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -457,7 +457,7 @@ public static void serializeWrite(SerializeWrite serializeWrite,
           MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
           TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
           TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
-          HashMap hashMap = (HashMap) object;
+          Map hashMap = (Map) object;
           serializeWrite.beginMap(hashMap);
           boolean isFirst = true;
           for (Map.Entry entry : hashMap.entrySet()) {
@@ -630,7 +630,7 @@ private static Object getComplexField(DeserializeRead deserializeRead,
           MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
           TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
           TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
-          HashMap hashMap = new HashMap();
+          Map hashMap = new LinkedHashMap();
           Object keyObj;
           Object valueObj;
           boolean isNull;
diff --git ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
index e75ba9d..55a8463 100644
--- ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
+++ ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
@@ -515,7 +515,7 @@ POSTHOOK: query: SELECT s2.f8 FROM nested_tbl_1 WHERE s1.f2 = 'foo' AND size(s2.
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nested_tbl_1
 #### A masked pattern was here ####
-{"f9":true,"f10":[10,11],"f11":{"key2":false,"key1":true}}
+{"f9":true,"f10":[10,11],"f11":{"key1":true,"key2":false}}
 PREHOOK: query: EXPLAIN
 SELECT col1, col2 FROM nested_tbl_1
 LATERAL VIEW explode(s2.f8.f10) tbl1 AS col1
 LATERAL VIEW explode(s3.f12) tbl2 AS col2
diff --git serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 83e5d68..b4c9c22 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -20,7 +20,7 @@
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -262,7 +262,7 @@ private Object serializeMap(MapTypeInfo typeInfo, MapObjectInspector fieldOI, Ob
     Map map = fieldOI.getMap(structFieldData);
 
     Schema valueType = schema.getValueType();
-    Map deserialized = new HashMap(fieldOI.getMapSize(structFieldData));
+    Map deserialized = new LinkedHashMap(fieldOI.getMapSize(structFieldData));
 
     for (Map.Entry entry : map.entrySet()) {
       deserialized.put(serialize(mapKeyTypeInfo, mapKeyObjectInspector, entry.getKey(), STRING_SCHEMA),
diff --git serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index e5b90c2..a48d4fe 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -23,7 +23,7 @@
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -550,10 +550,10 @@ static Object deserialize(InputByteBuffer buffer, TypeInfo type,
 
       // Create the map if needed
       Map r;
-      if (reuse == null) {
-        r = new HashMap();
+      if (reuse == null || reuse.getClass() != LinkedHashMap.class) {
+        r = new LinkedHashMap();
       } else {
-        r = (HashMap) reuse;
+        r = (Map) reuse;
         r.clear();
       }
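The orc_nested_column_pruning.q.out change is the user-visible payoff: f11 now prints key1 before key2 because the deserialized map remembers the order in which the writer inserted the keys. Roughly the difference, as a sketch (the entries mirror the golden file; the class name is made up):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class GoldenFileDemo {
      public static void main(String[] args) {
        // With a HashMap the same two entries could legally print as
        // {key2=false, key1=true}; a LinkedHashMap always replays the
        // writer's insertion order, so test output is deterministic.
        Map<String, Boolean> f11 = new LinkedHashMap<>();
        f11.put("key1", true);
        f11.put("key2", false);
        System.out.println(f11); // {key1=true, key2=false}
      }
    }
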
diff --git serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
index c7f4930..3f086cd 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.serde2.dynamic_type;
 
 import java.util.Collections;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -93,12 +93,12 @@ public String toString() {
   @Override
   public Map deserialize(Object reuse, TProtocol iprot) throws SerDeException, TException,
       IllegalAccessException {
-    HashMap deserializeReuse;
-    if (reuse != null) {
-      deserializeReuse = (HashMap) reuse;
+    Map deserializeReuse;
+    if (reuse != null && reuse.getClass() == LinkedHashMap.class) {
+      deserializeReuse = (Map) reuse;
       deserializeReuse.clear();
     } else {
-      deserializeReuse = new HashMap();
+      deserializeReuse = new LinkedHashMap();
     }
     TMap themap = iprot.readMapBegin();
     if (themap == null) {
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 8823d41..635ea04 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -23,8 +23,8 @@
 import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -445,7 +445,7 @@ public static Object copyToStandardObject(
     }
     case MAP: {
       MapObjectInspector moi = (MapObjectInspector) oi;
-      HashMap map = new HashMap();
+      Map map = new LinkedHashMap();
       Map omap = moi.getMap(o);
       for (Map.Entry entry : omap
           .entrySet()) {
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
index de41b97..bb28e65 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.serde2.objectinspector;
 
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
@@ -100,27 +100,27 @@ public String getTypeName() {
 
   // SettableMapObjectInspector
   @Override
   public Object create() {
-    Map m = new HashMap();
+    Map m = new LinkedHashMap();
     return m;
   }
 
   @Override
   public Object clear(Object map) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.clear();
     return m;
   }
 
   @Override
   public Object put(Object map, Object key, Object value) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.put(key, value);
     return m;
   }
 
   @Override
   public Object remove(Object map, Object key) {
-    Map m = (HashMap) map;
+    Map m = (Map) map;
     m.remove(key);
     return m;
  }
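Taken together, the serde-side changes make map round trips order-preserving end to end: the serializers walk entrySet() and the deserializers now fill LinkedHashMaps. A hedged sketch of that property with a stand-in round trip (roundTrip is a hypothetical helper, not an API in the patch):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RoundTripOrderDemo {
      // Stand-in for any serialize/deserialize pair in this patch: as long
      // as the writer iterates entrySet() and the reader fills a
      // LinkedHashMap, entry order survives the round trip.
      static Map<String, Integer> roundTrip(Map<String, Integer> in) {
        Map<String, Integer> out = new LinkedHashMap<>(in.size());
        for (Map.Entry<String, Integer> e : in.entrySet()) {
          out.put(e.getKey(), e.getValue()); // re-inserted in iteration order
        }
        return out;
      }

      public static void main(String[] args) {
        Map<String, Integer> m = new LinkedHashMap<>();
        m.put("z", 26);
        m.put("a", 1);
        System.out.println(roundTrip(m)); // {z=26, a=1}, same as the input
      }
    }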