diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java index 8defe34..f27b0c7 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructField; @@ -113,7 +114,9 @@ public void initialize(Configuration conf, Properties tbl) } else { inputPattern = null; } - List columnNames = Arrays.asList(columnNameProperty.split(",")); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java index 5a018ae..c294747 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java @@ -109,8 +109,9 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - - columnNames = Arrays.asList(columnNameProperty.split(",")); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); columnTypes = null; if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList(); diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java index 235d186..989d6c2 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -79,12 +80,13 @@ public void initialize(Configuration conf, Properties tbl) // Get column names and types String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // all table column names if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } // all column types diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java index ef17079..831e857 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java @@ -114,12 +114,13 @@ public void initialize(Configuration conf, Properties tbl) // Get column names and types String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // all table column names if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } // all column types diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java index 1230795..8fd676f 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; @@ -162,6 +163,8 @@ private static Properties getSerdeProperties(HCatTableInfo info, HCatSchema s) List fields = HCatUtil.getFieldSchemaList(s.getFields()); props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, MetaStoreUtils.getColumnNamesFromFieldSchema(fields)); + props.setProperty(serdeConstants.COLUMN_NAME_DELIMITER, + MetaStoreUtils.getColumnNameDelimiter(fields)); props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils.getColumnTypesFromFieldSchema(fields)); props.setProperty("columns.comments", diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java index c28f096..b122602 100644 --- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java @@ -49,9 +49,10 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // The input column can either be a string or a list of integer values. - List columnNames = Arrays.asList(columnNameProperty.split(",")); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java index 05d0590..6944fdd 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java @@ -50,9 +50,10 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // The input column can either be a string or a list of integer values. 
- List columnNames = Arrays.asList(columnNameProperty.split(",")); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java index 311718e..64e821a 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java @@ -40,9 +40,10 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - - // The input column can either be a string or a list of list of integer values. - List columnNames = Arrays.asList(columnNameProperty.split(",")); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + // The input column can either be a string or a list of list of integer values. 
+ List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java index 3504f5b..1c16e60 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java @@ -41,9 +41,10 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // The input column can either be a string or a list of integer values. 
- List columnNames = Arrays.asList(columnNameProperty.split(",")); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java index 670e5f2..f8f2a85 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java @@ -39,9 +39,10 @@ public void initialize(Configuration conf, Properties tbl) // Read the configuration parameters String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // The input column can either be a string or a list of integer values. 
- List columnNames = Arrays.asList(columnNameProperty.split(",")); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 4aea152..b21b9ed 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -1050,9 +1050,10 @@ public static Properties addCols(Properties schema, List cols) { StringBuilder colComment = new StringBuilder(); boolean first = true; + String columnNameDelimiter = getColumnNameDelimiter(cols); for (FieldSchema col : cols) { if (!first) { - colNameBuf.append(","); + colNameBuf.append(columnNameDelimiter); colTypeBuf.append(":"); colComment.append('\0'); } @@ -1064,6 +1065,7 @@ public static Properties addCols(Properties schema, List cols) { schema.setProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS, colNameBuf.toString()); + schema.setProperty(serdeConstants.COLUMN_NAME_DELIMITER, columnNameDelimiter); String colTypes = colTypeBuf.toString(); schema.setProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES, @@ -1177,15 +1179,25 @@ public static Properties getSchema( return addCols(getSchemaWithoutCols(sd, tblsd, parameters, databaseName, tableName, partitionKeys), tblsd.getCols()); } + public static String getColumnNameDelimiter(List fieldSchemas) { + // we first take a look if any fieldSchemas contain COMMA + for (int i = 0; i < fieldSchemas.size(); i++) { + if (fieldSchemas.get(i).getName().contains(",")) { + return String.valueOf(SerDeUtils.COLUMN_COMMENTS_DELIMITER); + } + } + return String.valueOf(SerDeUtils.COMMA); + } + /** * Convert 
FieldSchemas to columnNames. */ - public static String getColumnNamesFromFieldSchema( - List fieldSchemas) { + public static String getColumnNamesFromFieldSchema(List fieldSchemas) { + String delimiter = getColumnNameDelimiter(fieldSchemas); StringBuilder sb = new StringBuilder(); for (int i = 0; i < fieldSchemas.size(); i++) { if (i > 0) { - sb.append(","); + sb.append(delimiter); } sb.append(fieldSchemas.get(i).getName()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java index b0f8c8b..a179300 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java @@ -36,7 +36,9 @@ import org.apache.hadoop.hive.ql.io.RecordUpdater; import org.apache.hadoop.hive.ql.io.StatsProvidingRecordWriter; import org.apache.hadoop.hive.ql.io.orc.OrcSerde.OrcSerdeRow; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -141,11 +143,12 @@ public SerDeStats getStats() { !columnTypeProperty.isEmpty()) { List columnNames; List columnTypes; - + final String columnNameDelimiter = props.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
props + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java index 3ec9105..6dae512 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; @@ -84,13 +85,14 @@ public void initialize(Configuration conf, Properties table) { String columnNameProperty = table.getProperty(serdeConstants.LIST_COLUMNS); // NOTE: if "columns.types" is missing, all columns will be of String type String columnTypeProperty = table.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = table.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
table + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); String compressType = OrcConf.COMPRESS.getString(table, conf); // Parse the configuration parameters ArrayList columnNames = new ArrayList(); if (columnNameProperty != null && columnNameProperty.length() > 0) { - for (String name : columnNameProperty.split(",")) { + for (String name : columnNameProperty.split(columnNameDelimiter)) { columnNames.add(name); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java index bfb48a9..379a913 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.io.parquet.convert.HiveSchemaConverter; import org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport; import org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @@ -39,7 +41,6 @@ import org.apache.hadoop.mapred.RecordWriter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.util.Progressable; - import org.apache.parquet.hadoop.ParquetOutputFormat; /** @@ -97,11 +98,12 @@ public void checkOutputSpecs(final FileSystem ignored, final JobConf job) throws final String columnTypeProperty = tableProperties.getProperty(IOConstants.COLUMNS_TYPES); List columnNames; List columnTypes; - + final String columnNameDelimiter = tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tableProperties + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java index 5870a50..6413c5a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java @@ -21,6 +21,7 @@ import java.util.Properties; import com.google.common.base.Preconditions; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.optimizer.FieldNode; import org.apache.hadoop.hive.serde.serdeConstants; @@ -29,6 +30,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; @@ -95,11 +97,12 @@ public final void initialize(final Configuration conf, final Properties tbl) thr // Get column names and sort order final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); - + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index d3a1528..b2c5865 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -469,6 +469,7 @@ public static TableDesc getReduceKeyTableDesc(List fieldSchemas, SequenceFileInputFormat.class, SequenceFileOutputFormat.class, Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils .getColumnNamesFromFieldSchema(fieldSchemas), + serdeConstants.COLUMN_NAME_DELIMITER, MetaStoreUtils.getColumnNameDelimiter(fieldSchemas), serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils .getColumnTypesFromFieldSchema(fieldSchemas), serdeConstants.SERIALIZATION_SORT_ORDER, order, @@ -496,6 +497,7 @@ public static TableDesc getMapJoinKeyTableDesc(Configuration conf, SequenceFileInputFormat.class, SequenceFileOutputFormat.class, Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils .getColumnNamesFromFieldSchema(fieldSchemas), + serdeConstants.COLUMN_NAME_DELIMITER, MetaStoreUtils.getColumnNameDelimiter(fieldSchemas), serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils .getColumnTypesFromFieldSchema(fieldSchemas), serdeConstants.SERIALIZATION_SORT_ORDER, order.toString(), @@ -521,6 +523,7 @@ public static TableDesc getMapJoinValueTableDesc( SequenceFileOutputFormat.class, Utilities.makeProperties( serdeConstants.LIST_COLUMNS, MetaStoreUtils .getColumnNamesFromFieldSchema(fieldSchemas), + serdeConstants.COLUMN_NAME_DELIMITER, MetaStoreUtils.getColumnNameDelimiter(fieldSchemas), 
serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils .getColumnTypesFromFieldSchema(fieldSchemas), serdeConstants.ESCAPE_CHAR, "\\", @@ -536,6 +539,7 @@ public static TableDesc getIntermediateFileTableDesc( SequenceFileOutputFormat.class, Utilities.makeProperties( serdeConstants.LIST_COLUMNS, MetaStoreUtils .getColumnNamesFromFieldSchema(fieldSchemas), + serdeConstants.COLUMN_NAME_DELIMITER, MetaStoreUtils.getColumnNameDelimiter(fieldSchemas), serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils .getColumnTypesFromFieldSchema(fieldSchemas), serdeConstants.ESCAPE_CHAR, "\\", diff --git a/ql/src/test/queries/clientpositive/comma_in_column_name.q b/ql/src/test/queries/clientpositive/comma_in_column_name.q new file mode 100644 index 0000000..cb8823e --- /dev/null +++ b/ql/src/test/queries/clientpositive/comma_in_column_name.q @@ -0,0 +1,14 @@ +create table test (`x,y` int); + +insert into test values (1),(2); + +select `x,y` from test where `x,y` >=2 ; + +drop table test; + +create table test (`x,y` int) stored as orc; + +insert into test values (1),(2); + +select `x,y` from test where `x,y` <2 ; + diff --git a/ql/src/test/results/clientpositive/comma_in_column_name.q.out b/ql/src/test/results/clientpositive/comma_in_column_name.q.out new file mode 100644 index 0000000..f8e319d --- /dev/null +++ b/ql/src/test/results/clientpositive/comma_in_column_name.q.out @@ -0,0 +1,56 @@ +PREHOOK: query: create table test (`x,y` int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test +POSTHOOK: query: create table test (`x,y` int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test +PREHOOK: query: insert into test values (1),(2) +PREHOOK: type: QUERY +PREHOOK: Output: default@test +POSTHOOK: query: insert into test values (1),(2) +POSTHOOK: type: QUERY +POSTHOOK: Output: default@test +POSTHOOK: Lineage: 
test.x,y EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: select `x,y` from test where `x,y` >=2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test +#### A masked pattern was here #### +POSTHOOK: query: select `x,y` from test where `x,y` >=2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test +#### A masked pattern was here #### +2 +PREHOOK: query: drop table test +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@test +PREHOOK: Output: default@test +POSTHOOK: query: drop table test +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@test +POSTHOOK: Output: default@test +PREHOOK: query: create table test (`x,y` int) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test +POSTHOOK: query: create table test (`x,y` int) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test +PREHOOK: query: insert into test values (1),(2) +PREHOOK: type: QUERY +PREHOOK: Output: default@test +POSTHOOK: query: insert into test values (1),(2) +POSTHOOK: type: QUERY +POSTHOOK: Output: default@test +POSTHOOK: Lineage: test.x,y EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: select `x,y` from test where `x,y` <2 +PREHOOK: type: QUERY +PREHOOK: Input: default@test +#### A masked pattern was here #### +POSTHOOK: query: select `x,y` from test where `x,y` <2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test +#### A masked pattern was here #### +1 diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift index 6caad36..1d40d5a 100644 --- a/serde/if/serde.thrift +++ b/serde/if/serde.thrift @@ -74,6 +74,7 @@ const string LIST_COLUMNS = "columns"; const string LIST_COLUMN_TYPES = "columns.types"; const string TIMESTAMP_FORMATS = "timestamp.formats"; +const string COLUMN_NAME_DELIMITER = "column.name.delimiter"; 
const set PrimitiveTypes = [ VOID_TYPE_NAME diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp index 3a675bf..907acf2 100644 --- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp +++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp @@ -99,6 +99,8 @@ serdeConstants::serdeConstants() { TIMESTAMP_FORMATS = "timestamp.formats"; + COLUMN_NAME_DELIMITER = "column.name.delimiter"; + PrimitiveTypes.insert("void"); PrimitiveTypes.insert("boolean"); PrimitiveTypes.insert("tinyint"); diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h index a5f33fb..8785bd2 100644 --- a/serde/src/gen/thrift/gen-cpp/serde_constants.h +++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h @@ -59,6 +59,7 @@ class serdeConstants { std::string LIST_COLUMNS; std::string LIST_COLUMN_TYPES; std::string TIMESTAMP_FORMATS; + std::string COLUMN_NAME_DELIMITER; std::set PrimitiveTypes; std::set CollectionTypes; std::set IntegralTypes; diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java index 04ed8f5..2578d3e 100644 --- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java +++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java @@ -124,6 +124,8 @@ public static final String TIMESTAMP_FORMATS = "timestamp.formats"; + public static final String COLUMN_NAME_DELIMITER = "column.name.delimiter"; + public static final Set PrimitiveTypes = new HashSet(); static { PrimitiveTypes.add("void"); diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php index 18c3991..ea2dbbe 100644 --- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php +++ 
b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php @@ -62,6 +62,7 @@ final class Constant extends \Thrift\Type\TConstant { static protected $LIST_COLUMNS; static protected $LIST_COLUMN_TYPES; static protected $TIMESTAMP_FORMATS; + static protected $COLUMN_NAME_DELIMITER; static protected $PrimitiveTypes; static protected $CollectionTypes; static protected $IntegralTypes; @@ -242,6 +243,10 @@ final class Constant extends \Thrift\Type\TConstant { return "timestamp.formats"; } + static protected function init_COLUMN_NAME_DELIMITER() { + return "column.name.delimiter"; + } + static protected function init_PrimitiveTypes() { return array( "void" => true, diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py index fafdc24..e3b24eb 100644 --- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py +++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py @@ -53,6 +53,7 @@ LIST_COLUMNS = "columns" LIST_COLUMN_TYPES = "columns.types" TIMESTAMP_FORMATS = "timestamp.formats" +COLUMN_NAME_DELIMITER = "column.name.delimiter" PrimitiveTypes = set([ "void", "boolean", diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb index 0ce9f27..15efaea 100644 --- a/serde/src/gen/thrift/gen-rb/serde_constants.rb +++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb @@ -95,6 +95,8 @@ LIST_COLUMN_TYPES = %q"columns.types" TIMESTAMP_FORMATS = %q"timestamp.formats" +COLUMN_NAME_DELIMITER = %q"column.name.delimiter" + PrimitiveTypes = Set.new([ %q"void", %q"boolean", diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java index 551a9da..e49b6dc 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java +++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java @@ -103,6 +103,8 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException && serdeName.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) { columnsetSerDe = true; } + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnProperty == null || columnProperty.length() == 0 || columnsetSerDe) { // Hack for tables with no columns @@ -111,7 +113,7 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException .getReflectionObjectInspector(ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } else { - columnNames = Arrays.asList(columnProperty.split(",")); + columnNames = Arrays.asList(columnProperty.split(columnNameDelimiter)); cachedObjectInspector = MetadataListStructObjectInspector .getInstance(columnNames); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java index 752b907..156b410 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java @@ -121,8 +121,9 @@ public void initialize(Configuration conf, Properties tbl) "This table does not have serde property \"input.regex\"!"); } - - List columnNames = Arrays.asList(columnNameProperty.split(",")); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); columnTypes = TypeInfoUtils .getTypeInfosFromTypeString(columnTypeProperty); assert columnNames.size() == columnTypes.size(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java index 7ffc964..6802a05 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java @@ -63,7 +63,9 @@ public static final char QUOTE = '"'; public static final char COLON = ':'; public static final char COMMA = ','; - public static final char COLUMN_COMMENTS_DELIMITER = '\0'; + // '\0' should be used as the COLUMN_NAME_DELIMITER when a column name contains a comma, + // but backward compatibility must also be preserved + public static char COLUMN_COMMENTS_DELIMITER = '\0'; public static final String LBRACKET = "["; public static final String RBRACKET = "]"; public static final String LBRACE = "{"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java index 0be54e0..e5f2c5e 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @@ -96,14 +97,16 @@ public void initialize(Configuration configuration, Properties properties) throw final String columnNameProperty = 
properties.getProperty(serdeConstants.LIST_COLUMNS); final String columnTypeProperty = properties.getProperty(serdeConstants.LIST_COLUMN_TYPES); final String columnCommentProperty = properties.getProperty(LIST_COLUMN_COMMENTS,""); - + final String columnNameDelimiter = properties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? properties + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + if (hasExternalSchema(properties) || columnNameProperty == null || columnNameProperty.isEmpty() || columnTypeProperty == null || columnTypeProperty.isEmpty()) { schema = determineSchemaOrReturnErrorSchema(configuration, properties); } else { // Get column names and sort order - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty); schema = getSchemaFromCols(properties, columnNames, columnTypes, columnCommentProperty); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java index b6b23c2..f18585d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.mapred.JobConf; @@ -118,8 +119,10 @@ public static Schema determineSchemaOrThrowException(Configuration conf, Propert || columnTypeProperty == null || columnTypeProperty.isEmpty() ) { throw new AvroSerdeException(EXCEPTION_MESSAGE); } + final String columnNameDelimiter = 
properties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? properties + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // Get column names and types - List columnNames = Arrays.asList(columnNameProperty.split(",")); + List columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); List columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty); Schema schema = AvroSerDe.getSchemaFromCols(properties, columnNames, columnTypes, columnCommentProperty); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java index 0a035c6..89e15c3 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -152,10 +153,12 @@ public void initialize(Configuration conf, Properties tbl) // Get column names and sort order String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java index 7232d0b..ee4bb34 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -139,9 +140,10 @@ public void extractColumnInfo() throws SerDeException { String columnTypeProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES); // Parse the configuration parameters - + String columnNameDelimiter = tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tableProperties + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty != null && columnNameProperty.length() > 0) { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } else { columnNames = new ArrayList(); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java index 99abb5d..56b4ca3 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -108,11 +109,13 @@ public void initialize(Configuration conf, Properties tbl) throws SerDeException { // Get column names and types String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); + String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java index 5c31974..84ed6ba 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; +import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; @@ -73,10 +74,12 @@ public void initialize(Configuration conf, Properties tbl) throws SerDeException MAX_BUFFERED_ROWS = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE); String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); + final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tbl + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); if (columnNameProperty.length() == 0) { columnNames = new ArrayList(); } else { - columnNames = Arrays.asList(columnNameProperty.split(",")); + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList();