diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 1e2007b..8d491c4 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -221,6 +221,9 @@
// Max number of lines of footer user can set for a table file.
HIVE_FILE_MAX_FOOTER("hive.file.max.footer", 100),
 
+ // Make column names unique in the result set by qualifying them with the table alias if needed
+ HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES("hive.resultset.use.unique.column.names", true),
+
// Hadoop Configuration Properties
// Properties with null values are ignored and exist only for the purpose of giving us
// a symbolic name to reference in the Hive source code. Properties with non-null
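The new entry is read through the standard HiveConf accessors. A minimal sketch (assuming nothing beyond HiveConf itself) of reading the flag, mirroring the getBoolVar call in the SemanticAnalyzer hunk below, and switching it off for sessions that prefer the old unqualified labels:

```java
import org.apache.hadoop.hive.conf.HiveConf;

public class UniqueNamesFlagSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Read the flag exactly as the SemanticAnalyzer change below does.
    boolean useUniqueNames =
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES);
    System.out.println("unique result set column names: " + useUniqueNames);
    // Sessions that want the pre-patch unqualified labels can opt out.
    conf.setBoolVar(HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES, false);
  }
}
```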
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index 8aaf3a0..b0d1e83 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -2476,6 +2476,16 @@
</property>

<property>
+  <name>hive.resultset.use.unique.column.names</name>
+  <value>true</value>
+  <description>
+    Make column names unique in the result set by qualifying column names with the table alias if needed.
+    A table alias will be added to column names for queries of type "select *" or
+    if the query explicitly uses a table alias, e.g. "select r1.x..".
+  </description>
+</property>
+
+<property>
  <name>hive.compat</name>
  <value>0.12</value>
  <description>
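The description is easiest to see from the JDBC side. A hedged sketch of the observable effect, assuming a local HiveServer2 and the stock src table; the URL and credentials are placeholders:

```java
import java.sql.*;

public class UniqueColumnNamesDemo {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    // Placeholder URL; adjust host/port/credentials for your deployment.
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select * from src");
    ResultSetMetaData meta = res.getMetaData();
    // With hive.resultset.use.unique.column.names=true the labels come back
    // qualified ("src.key", "src.value"); with false they stay "key", "value".
    for (int i = 1; i <= meta.getColumnCount(); i++) {
      System.out.println(meta.getColumnLabel(i));
    }
    con.close();
  }
}
```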
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
index dac62d5..1ebedde 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
@@ -513,16 +513,17 @@ private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throw
assertEquals(
"Unexpected column count", expectedColCount, meta.getColumnCount());
+ String colQualifier = ((tableName != null) && !tableName.isEmpty()) ? tableName.toLowerCase() + "." : "";
boolean moreRow = res.next();
while (moreRow) {
try {
i++;
- assertEquals(res.getInt(1), res.getInt("under_col"));
- assertEquals(res.getString(1), res.getString("under_col"));
- assertEquals(res.getString(2), res.getString("value"));
+ assertEquals(res.getInt(1), res.getInt(colQualifier + "under_col"));
+ assertEquals(res.getString(1), res.getString(colQualifier + "under_col"));
+ assertEquals(res.getString(2), res.getString(colQualifier + "value"));
if (isPartitionTable) {
assertEquals(res.getString(3), partitionedColumnValue);
- assertEquals(res.getString(3), res.getString(partitionedColumnName));
+ assertEquals(res.getString(3), res.getString(colQualifier + partitionedColumnName));
}
assertFalse("Last result value was not null", res.wasNull());
assertNull("No warnings should be found on ResultSet", res
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 125a982..0163788 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -911,16 +911,17 @@ private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throw
assertEquals(
"Unexpected column count", expectedColCount, meta.getColumnCount());
+ String colQualifier = ((tableName != null) && !tableName.isEmpty()) ? tableName.toLowerCase() + "." : "";
boolean moreRow = res.next();
while (moreRow) {
try {
i++;
- assertEquals(res.getInt(1), res.getInt("under_col"));
- assertEquals(res.getString(1), res.getString("under_col"));
- assertEquals(res.getString(2), res.getString("value"));
+ assertEquals(res.getInt(1), res.getInt(colQualifier + "under_col"));
+ assertEquals(res.getString(1), res.getString(colQualifier + "under_col"));
+ assertEquals(res.getString(2), res.getString(colQualifier + "value"));
if (isPartitionTable) {
assertEquals(res.getString(3), partitionedColumnValue);
- assertEquals(res.getString(3), res.getString(partitionedColumnName));
+ assertEquals(res.getString(3), res.getString(colQualifier + partitionedColumnName));
}
assertFalse("Last result value was not null", res.wasNull());
assertNull("No warnings should be found on ResultSet", res
@@ -1866,7 +1867,7 @@ public void testFetchFirstQuery() throws Exception {
*/
@Test
public void testFetchFirstNonMR() throws Exception {
- execFetchFirst("select * from " + dataTypeTableName, "c4", false);
+ execFetchFirst("select * from " + dataTypeTableName, dataTypeTableName.toLowerCase() + "." + "c4", false);
}

/**
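Both test files apply the same rule: when a table name is in play, lookups by label must prepend the lower-cased table name. A hypothetical helper (not part of the patch) capturing that pattern:

```java
public class QualifiedLabelSketch {
  // Mirrors the colQualifier construction in the test changes above.
  static String qualifiedLabel(String tableName, String columnName) {
    String qualifier = (tableName != null && !tableName.isEmpty())
        ? tableName.toLowerCase() + "." : "";
    return qualifier + columnName;
  }

  public static void main(String[] args) {
    System.out.println(qualifiedLabel("MyTable", "c4")); // mytable.c4
    System.out.println(qualifiedLabel(null, "c4"));      // c4
  }
}
```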
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index a6ff4cb..e3291be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -9217,8 +9217,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
// up with later.
Operator sinkOp = genPlan(qb);
- resultSchema =
- convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
+    if (createVwDesc != null) {
+      resultSchema = convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
+    } else {
+      resultSchema = convertRowSchemaToResultSetSchema(opParseCtx.get(sinkOp).getRowResolver(),
+          HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES));
+    }
ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner,
opToPartList, topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext,
@@ -9406,15 +9409,30 @@ private void saveViewDefinition() throws SemanticException {
createVwDesc.setViewExpandedText(expandedText);
}
-  private List<FieldSchema> convertRowSchemaToViewSchema(RowResolver rr) {
+  private List<FieldSchema> convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException {
+    List<FieldSchema> fieldSchema = convertRowSchemaToResultSetSchema(rr, false);
+    ParseUtils.validateColumnNameUniqueness(fieldSchema);
+    return fieldSchema;
+  }
+
+  private List<FieldSchema> convertRowSchemaToResultSetSchema(RowResolver rr,
+      boolean useTabAliasIfAvailable) {
List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
+ String[] qualifiedColName;
+ String colName;
+
for (ColumnInfo colInfo : rr.getColumnInfos()) {
if (colInfo.isHiddenVirtualCol()) {
continue;
}
- String colName = rr.reverseLookup(colInfo.getInternalName())[1];
- fieldSchemas.add(new FieldSchema(colName,
- colInfo.getType().getTypeName(), null));
+
+ qualifiedColName = rr.reverseLookup(colInfo.getInternalName());
+ if (useTabAliasIfAvailable && qualifiedColName[0] != null && !qualifiedColName[0].isEmpty()) {
+ colName = qualifiedColName[0] + "." + qualifiedColName[1];
+ } else {
+ colName = qualifiedColName[1];
+ }
+ fieldSchemas.add(new FieldSchema(colName, colInfo.getType().getTypeName(), null));
}
return fieldSchemas;
}
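The naming rule added above hinges on RowResolver.reverseLookup() returning a {tableAlias, columnName} pair whose alias slot may be null or empty (as it is for computed columns). A standalone sketch of just that decision, with the reverseLookup contract taken as an assumption:

```java
public class ResultSetLabelSketch {
  // qualifiedColName[0] = table alias (may be null/empty), [1] = column name.
  static String resultSetLabel(String[] qualifiedColName, boolean useTabAliasIfAvailable) {
    String alias = qualifiedColName[0];
    String column = qualifiedColName[1];
    if (useTabAliasIfAvailable && alias != null && !alias.isEmpty()) {
      return alias + "." + column;
    }
    return column;
  }

  public static void main(String[] args) {
    System.out.println(resultSetLabel(new String[] {"src", "key"}, true));  // src.key
    System.out.println(resultSetLabel(new String[] {null, "_c1"}, true));   // _c1
    System.out.println(resultSetLabel(new String[] {"src", "key"}, false)); // key
  }
}
```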
diff --git a/ql/src/test/results/clientpositive/print_header.q.out b/ql/src/test/results/clientpositive/print_header.q.out
index e26af77..4e939e8 100644
--- a/ql/src/test/results/clientpositive/print_header.q.out
+++ b/ql/src/test/results/clientpositive/print_header.q.out
@@ -37,7 +37,7 @@ POSTHOOK: query: SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
#### A masked pattern was here ####
-key _c1
+src.key _c1
0 0.0
10 10.0
100 200.0
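Note that only key picks up the src. qualifier in the updated golden file: the aggregate column keeps its generated _c1 label because reverseLookup returns no table alias for computed columns, so convertRowSchemaToResultSetSchema leaves them unqualified.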