Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1467854)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -210,6 +210,7 @@
JOB_DEBUG_TIMEOUT("hive.exec.job.debug.timeout", 30000),
TASKLOG_DEBUG_TIMEOUT("hive.exec.tasklog.debug.timeout", 20000),
OUTPUT_FILE_EXTENSION("hive.output.file.extension", null),
+ OUTPUT_MARKSCHEMA("hive.output.markschema", false),
// should hive determine whether to run in local mode automatically ?
LOCALMODEAUTO("hive.exec.mode.local.auto", false),
Index: conf/hive-default.xml.template
===================================================================
--- conf/hive-default.xml.template (revision 1467854)
+++ conf/hive-default.xml.template (working copy)
@@ -1886,5 +1886,12 @@
+
+<property>
+  <name>hive.output.markschema</name>
+  <value>false</value>
+  <description>Whether to write a _metadata file listing the output column aliases when query results are written to a directory.</description>
+</property>
+
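For illustration, a minimal sketch of toggling the new flag through the HiveConf API (not part of the patch; the standalone HiveConf construction is only for demonstration, and the ConfVar is the one added above):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    public class MarkSchemaFlagDemo {
      public static void main(String[] args) {
        // Defaults to false, matching the ConfVar default and the template entry above.
        HiveConf conf = new HiveConf();
        System.out.println(HiveConf.getBoolVar(conf, ConfVars.OUTPUT_MARKSCHEMA)); // false
        // Equivalent to "set hive.output.markschema=true;" in a Hive session.
        conf.setBoolVar(ConfVars.OUTPUT_MARKSCHEMA, true);
        System.out.println(HiveConf.getBoolVar(conf, ConfVars.OUTPUT_MARKSCHEMA)); // true
      }
    }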
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1467854)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.exec;
import java.io.IOException;
+import java.io.OutputStream;
import java.io.Serializable;
import java.security.AccessControlException;
import java.util.ArrayList;
@@ -35,6 +36,7 @@
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
@@ -202,6 +204,25 @@
if (lfd != null) {
Path targetPath = new Path(lfd.getTargetDir());
Path sourcePath = new Path(lfd.getSourceDir());
+      if (HiveConf.getBoolVar(conf, ConfVars.OUTPUT_MARKSCHEMA)) {
+        Path metaDataPath = new Path(sourcePath, "_metadata");
+        FileSystem fs = sourcePath.getFileSystem(conf); // resolve against the source path's filesystem
+        OutputStream out = fs.create(metaDataPath);
+        List<String> schema = lfd.getAlias();
+        String fieldDelim = ":"; // TODO: use the real output field delimiter once LoadFileDesc exposes it
+        StringBuilder resultSchema = new StringBuilder();
+        for (String col : schema) {
+          if (resultSchema.length() > 0) {
+            resultSchema.append(fieldDelim);
+          }
+          resultSchema.append(col);
+        }
+        try {
+          out.write(resultSchema.append('\n').toString().getBytes("UTF-8"));
+        } finally {
+          out.close();
+        }
+      }
moveFile(sourcePath, targetPath, lfd.getIsDfsDir());
}
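With the flag on, the hunk above drops a _metadata file next to the query output before the move, containing the column aliases joined by ":". A hedged sketch of a consumer reading that marker back (the /tmp/out path and the key/value aliases are invented for the example):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MetadataMarkerReader {
      public static void main(String[] args) throws Exception {
        // Assumed output of e.g. INSERT OVERWRITE DIRECTORY '/tmp/out' SELECT key, value FROM src
        Path marker = new Path("/tmp/out/_metadata");
        FileSystem fs = marker.getFileSystem(new Configuration());
        BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(marker), "UTF-8"));
        try {
          // One line, aliases joined by ':' as written in MoveTask, e.g. "key:value"
          String[] aliases = in.readLine().split(":");
          for (String a : aliases) {
            System.out.println(a);
          }
        } finally {
          in.close();
        }
      }
    }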
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1467854)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -5097,6 +5097,7 @@
+ dest_path, e);
}
}
+    List<String> alias = new ArrayList<String>();
String cols = "";
String colTypes = "";
ArrayList colInfos = inputRR.getColumnInfos();
@@ -5116,6 +5117,7 @@
if (nm[1] != null) { // non-null column alias
colInfo.setAlias(nm[1]);
+ alias.add(nm[1]);
}
if (field_schemas != null) {
@@ -5168,7 +5170,7 @@
}
boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
- loadFileWork.add(new LoadFileDesc(tblDesc, queryTmpdir, destStr, isDfsDir, cols,
+ loadFileWork.add(new LoadFileDesc(tblDesc, queryTmpdir, destStr, isDfsDir, alias, cols,
colTypes));
if (tblDesc == null) {
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java (revision 1467854)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java (working copy)
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.List;
/**
* LoadFileDesc.
@@ -28,8 +29,8 @@
private static final long serialVersionUID = 1L;
private String targetDir;
private boolean isDfsDir;
- // list of columns, comma separated
- private String columns;
+  private List<String> alias; // aliases of the columns (external names as seen by users)
+  private String columns; // list of columns, comma separated
private String columnTypes;
private String destinationCreateTable;
@@ -38,8 +39,8 @@
public LoadFileDesc(final CreateTableDesc createTableDesc, final String sourceDir,
final String targetDir,
- final boolean isDfsDir, final String columns, final String columnTypes) {
- this(sourceDir, targetDir, isDfsDir, columns, columnTypes);
+      final boolean isDfsDir, final List<String> alias, final String columns, final String columnTypes) {
+    this(sourceDir, targetDir, isDfsDir, alias, columns, columnTypes);
if (createTableDesc != null && createTableDesc.getDatabaseName() != null
&& createTableDesc.getTableName() != null) {
destinationCreateTable = (createTableDesc.getTableName().contains(".") ? "" : createTableDesc
@@ -49,11 +50,12 @@
}
public LoadFileDesc(final String sourceDir, final String targetDir,
- final boolean isDfsDir, final String columns, final String columnTypes) {
+      final boolean isDfsDir, final List<String> alias, final String columns, final String columnTypes) {
super(sourceDir);
this.targetDir = targetDir;
this.isDfsDir = isDfsDir;
+ this.alias = alias;
this.columns = columns;
this.columnTypes = columnTypes;
}
@@ -77,6 +79,17 @@
}
/**
+   * @return the list of column aliases (the external names as seen by users)
+   */
+  public List<String> getAlias() {
+    return alias;
+  }
+
+  public void setAlias(List<String> alias) {
+    this.alias = alias;
+  }
+
+ /**
* @return the columns
*/
public String getColumns() {
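Finally, a small sketch exercising the new constructor and accessor (not part of the patch; the directories, aliases, and types are placeholders, and isDfsDir=true mirrors the DEST_DFS_FILE case in SemanticAnalyzer):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hive.ql.plan.LoadFileDesc;

    public class LoadFileDescAliasDemo {
      public static void main(String[] args) {
        List<String> alias = Arrays.asList("key", "value");
        // Placeholder source/target dirs and column metadata.
        LoadFileDesc lfd = new LoadFileDesc("/tmp/scratch", "/tmp/out", true,
            alias, "key,value", "string,string");
        System.out.println(lfd.getAlias()); // [key, value]
      }
    }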