diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 0ffdbe0..2ffbd8f 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -273,6 +273,7 @@ public void createTable(Table tbl) throws InvalidObjectException, MetaException
       Table tblCopy = tbl.deepCopy();
       tblCopy.setDbName(HiveStringUtils.normalizeIdentifier(tblCopy.getDbName()));
       tblCopy.setTableName(HiveStringUtils.normalizeIdentifier(tblCopy.getTableName()));
+      normalizeColumnNames(tblCopy);
       getHBase().putTable(tblCopy);
       commit = true;
     } catch (IOException e) {
@@ -283,6 +284,33 @@
     }
   }
 
+  /**
+   * Normalizes (lower-cases) the names of all columns and partition keys of the
+   * given table, mirroring the identifier normalization already applied to the
+   * database and table names in createTable(). Fields that are unset on the
+   * Thrift object are left untouched.
+   */
+  private void normalizeColumnNames(Table tbl) {
+    if (tbl.getSd() != null && tbl.getSd().getCols() != null) {
+      tbl.getSd().setCols(normalizeFieldSchemaList(tbl.getSd().getCols()));
+    }
+    if (tbl.getPartitionKeys() != null) {
+      tbl.setPartitionKeys(normalizeFieldSchemaList(tbl.getPartitionKeys()));
+    }
+  }
+
+  /**
+   * Returns a copy of the given field schemas with each column name normalized
+   * via HiveStringUtils.normalizeIdentifier; types and comments are preserved.
+   */
+  private List<FieldSchema> normalizeFieldSchemaList(List<FieldSchema> fieldSchemas) {
+    List<FieldSchema> ret = new ArrayList<>(fieldSchemas.size());
+    for (FieldSchema fieldSchema : fieldSchemas) {
+      ret.add(new FieldSchema(HiveStringUtils.normalizeIdentifier(fieldSchema.getName()),
+          fieldSchema.getType(), fieldSchema.getComment()));
+    }
+    return ret;
+  }
+
   @Override
   public boolean dropTable(String dbName, String tableName) throws MetaException,
       NoSuchObjectException, InvalidObjectException, InvalidInputException {