Index: conf/hive-default.xml
===================================================================
--- conf/hive-default.xml (revision 955109)
+++ conf/hive-default.xml (working copy)
@@ -583,4 +583,10 @@
numbers, this conf var needs to be set manually.
+
+ hive.variable.substitute
+ true
+ This enables substitution using syntax like ${var} ${system:var} and ${env:var}.
+
+
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 955109)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -250,6 +250,9 @@
// For har files
HIVEARCHIVEENABLED("hive.archive.enabled", false),
HIVEHARPARENTDIRSETTABLE("hive.archive.har.parentdir.settable", false),
+
+ //variable substitution
+ HIVEVARIABLESUBSTITUTE("hive.variable.substitute", true),
;
public final String varname;
@@ -547,4 +550,5 @@
public static String getColumnInternalName(int pos) {
return "_col" + pos;
}
+
}
Index: ql/src/test/results/clientpositive/set_processor_namespaces.q.out
===================================================================
--- ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 0)
+++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 0)
@@ -0,0 +1,63 @@
+zzz=5
+system:xxx=5
+system:yyy=5
+go=5
+raw=${zzz}
+PREHOOK: query: EXPLAIN SELECT * FROM src where key=5
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT * FROM src where key=5
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 5))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Filter Operator
+ predicate:
+ expr: (key = 5)
+ type: boolean
+ Filter Operator
+ predicate:
+ expr: (key = 5)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT * FROM src where key=5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/mnt/data/hive/hive/build/ql/scratchdir/hive_2010-07-08_20-36-06_114_378550270322471814/10000
+POSTHOOK: query: SELECT * FROM src where key=5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/mnt/data/hive/hive/build/ql/scratchdir/hive_2010-07-08_20-36-06_114_378550270322471814/10000
+5 val_5
+5 val_5
+5 val_5
+c=1
+../lib/derby.jar
Index: ql/src/test/queries/clientpositive/set_processor_namespaces.q
===================================================================
--- ql/src/test/queries/clientpositive/set_processor_namespaces.q (revision 0)
+++ ql/src/test/queries/clientpositive/set_processor_namespaces.q (revision 0)
@@ -0,0 +1,32 @@
+set zzz=5;
+set zzz;
+
+set system:xxx=5;
+set system:xxx;
+
+set system:yyy=${system:xxx};
+set system:yyy;
+
+set go=${zzz};
+set go;
+
+set hive.variable.substitute=false;
+set raw=${zzz};
+set raw;
+
+set hive.variable.substitute=true;
+
+EXPLAIN SELECT * FROM src where key=${zzz};
+SELECT * FROM src where key=${zzz};
+
+set a=1;
+set b=a;
+set c=${${b}};
+set c;
+
+set jar=../lib/derby.jar;
+
+add file ${jar};
+list file;
+delete file ${jar};
+list file;
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (revision 955109)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (working copy)
@@ -21,6 +21,7 @@
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -39,6 +40,7 @@
public CommandProcessorResponse run(String command) {
SessionState ss = SessionState.get();
+ command = new VariableSubstitution().substitute(ss.getConf(),command);
String[] tokens = command.split("\\s+");
SessionState.ResourceType t;
if (tokens.length < 2
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (revision 955109)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (working copy)
@@ -21,6 +21,7 @@
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -38,6 +39,7 @@
public CommandProcessorResponse run(String command) {
SessionState ss = SessionState.get();
+ command = new VariableSubstitution().substitute(ss.getConf(),command);
String[] tokens = command.split("\\s+");
SessionState.ResourceType t;
Index: ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (revision 955109)
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (working copy)
@@ -18,11 +18,12 @@
package org.apache.hadoop.hive.ql.processors;
+import java.util.Map;
import java.util.Properties;
-import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;
/**
@@ -57,6 +58,15 @@
for (Map.Entry entries : sortedMap.entrySet()) {
ss.out.println(entries.getKey() + "=" + entries.getValue());
}
+
+ for (Map.Entry entry : mapToSortedMap(System.getenv()).entrySet()) {
+ ss.out.println("env:"+entry.getKey() + "=" + entry.getValue());
+ }
+
+ for (Map.Entry entry :
+ propertiesToSortedMap(System.getProperties()).entrySet() ) {
+ ss.out.println("system:"+entry.getKey() + "=" + entry.getValue());
+ }
}
private void dumpOption(Properties p, String s) {
@@ -72,6 +82,67 @@
public void init() {
}
+ private CommandProcessorResponse setVariable(String varname, String varvalue){
+ SessionState ss = SessionState.get();
+ if (varname.startsWith("env:")){
+ ss.err.println("env:* variables can not be set.");
+ return new CommandProcessorResponse(1);
+ } else if (varname.startsWith("system:")){
+ String propName = varname.substring(7);
+ System.getProperties().setProperty(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
+ return new CommandProcessorResponse(0);
+ } else {
+ ss.getConf().set(varname, new VariableSubstitution().substitute(ss.getConf(),varvalue) );
+ return new CommandProcessorResponse(0);
+ }
+ }
+
+ private SortedMap propertiesToSortedMap(Properties p){
+ SortedMap sortedPropMap = new TreeMap();
+ for (Map.Entry