diff --git build-common.xml build-common.xml
index 96e8046..e07157f 100644
--- build-common.xml
+++ build-common.xml
@@ -59,7 +59,7 @@
-
+
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
index 60aa614..9422bf7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
@@ -138,7 +138,6 @@ public int hashCode() {
public void setOp(Operator<? extends OperatorDesc> op) {
this.op = op;
}
-
}
private static class MapOpCtx {
@@ -468,8 +467,8 @@ else if (partRawRowObjectInspector.equals(tblRawRowObjectInspector)) {
public void setChildren(Configuration hconf) throws HiveException {
- Path fpath = new Path((new Path(HiveConf.getVar(hconf,
- HiveConf.ConfVars.HADOOPMAPFILENAME))).toUri().getPath());
+ Path fpath = new Path(HiveConf.getVar(hconf,
+ HiveConf.ConfVars.HADOOPMAPFILENAME));
ArrayList<Operator<? extends OperatorDesc>> children =
new ArrayList<Operator<? extends OperatorDesc>>();
@@ -481,7 +480,7 @@ public void setChildren(Configuration hconf) throws HiveException {
try {
for (String onefile : conf.getPathToAliases().keySet()) {
MapOpCtx opCtx = initObjectInspector(conf, hconf, onefile, convertedOI);
- Path onepath = new Path(new Path(onefile).toUri().getPath());
+ Path onepath = new Path(onefile);
List<String> aliases = conf.getPathToAliases().get(onefile);
for (String onealias : aliases) {
@@ -513,7 +512,7 @@ public void setChildren(Configuration hconf) throws HiveException {
// didn't find match for input file path in configuration!
// serious problem ..
LOG.error("Configuration does not have any alias for path: "
- + fpath.toUri().getPath());
+ + fpath.toUri());
throw new HiveException("Configuration and input path are inconsistent");
}
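The MapOperator hunks above all remove the same idiom: rebuilding a Path from toUri().getPath(), which silently drops the scheme and authority before the path is used as a lookup key. A minimal sketch of the behavior being fixed (Hadoop's Path plus java.net.URI; the namenode address is made up for illustration):

    import org.apache.hadoop.fs.Path;

    Path full = new Path("hdfs://namenode:8020/tmp/test");
    String stripped = full.toUri().getPath();   // "/tmp/test"
    Path rebuilt = new Path(stripped);          // filesystem identity is gone
    // full.toString()    -> "hdfs://namenode:8020/tmp/test"
    // rebuilt.toString() -> "/tmp/test"

With the stripping gone, fpath and the pathToAliases keys are compared with scheme and authority intact, so a table or partition located on a filesystem other than the default one resolves correctly.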
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 814213d..20e6ddb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -690,11 +690,6 @@ public static PartitionDesc getPartitionDescFromTableDesc(TableDesc tblDesc, Par
return new PartitionDesc(part, tblDesc);
}
- public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work) {
- mr.addMapWork(tbl.getDataLocation().getPath(), alias, work, new PartitionDesc(
- getTableDesc(tbl), (LinkedHashMap<String, String>) null));
- }
-
private static String getOpTreeSkel_helper(Operator<?> op, String indent) {
if (op == null) {
return "";
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 6f8bc47..627f084 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -44,6 +44,7 @@
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -132,6 +133,11 @@ protected void setUp() {
mr = PlanUtils.getMapRedWork();
}
+ public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work) {
+ mr.addMapWork(tbl.getDataLocation().toString(), alias, work, new PartitionDesc(
+ Utilities.getTableDesc(tbl), null));
+ }
+
private static void fileDiff(String datafile, String testdir) throws Exception {
String testFileDir = conf.get("test.data.files");
System.out.println(testFileDir);
@@ -190,7 +196,7 @@ private void populateMapPlan1(Table src) {
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
- Utilities.addMapWork(mr, src, "a", op1);
+ addMapWork(mr, src, "a", op1);
}
@SuppressWarnings("unchecked")
@@ -209,7 +215,7 @@ private void populateMapPlan2(Table src) {
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
- Utilities.addMapWork(mr, src, "a", op1);
+ addMapWork(mr, src, "a", op1);
}
@SuppressWarnings("unchecked")
@@ -226,7 +232,7 @@ private void populateMapRedPlan1(Table src) throws SemanticException {
Utilities.makeList(getStringColumn("value")), outputColumns, true,
-1, 1, -1));
- Utilities.addMapWork(mr, src, "a", op1);
+ addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
@@ -254,7 +260,7 @@ private void populateMapRedPlan2(Table src) throws SemanticException {
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns, false, -1, 1, -1));
- Utilities.addMapWork(mr, src, "a", op1);
+ addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
@@ -287,7 +293,7 @@ private void populateMapRedPlan3(Table src, Table src2) throws SemanticException
Utilities.makeList(getStringColumn("value")), outputColumns, true,
Byte.valueOf((byte) 0), 1, -1));
- Utilities.addMapWork(mr, src, "a", op1);
+ addMapWork(mr, src, "a", op1);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
@@ -296,7 +302,7 @@ private void populateMapRedPlan3(Table src, Table src2) throws SemanticException
Utilities.makeList(getStringColumn("key")), outputColumns, true,
Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1));
- Utilities.addMapWork(mr, src2, "b", op2);
+ addMapWork(mr, src2, "b", op2);
mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
// reduce side work
@@ -338,7 +344,7 @@ private void populateMapRedPlan4(Table src) throws SemanticException {
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
- Utilities.addMapWork(mr, src, "a", op4);
+ addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
@@ -375,7 +381,7 @@ private void populateMapRedPlan5(Table src) throws SemanticException {
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
- Utilities.addMapWork(mr, src, "a", op4);
+ addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
@@ -414,7 +420,7 @@ private void populateMapRedPlan6(Table src) throws SemanticException {
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
- Utilities.addMapWork(mr, src, "a", op4);
+ addMapWork(mr, src, "a", op4);
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
index aae0b15..79bed09 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
@@ -320,7 +320,7 @@ public void testMapOperator() throws Throwable {
aliases.add("b");
LinkedHashMap<String, ArrayList<String>> pathToAliases =
new LinkedHashMap<String, ArrayList<String>>();
- pathToAliases.put("/testDir", aliases);
+ pathToAliases.put("hdfs:///testDir", aliases);
// initialize pathToTableInfo
// Default: treat the table as a single column "col"
@@ -328,7 +328,7 @@ public void testMapOperator() throws Throwable {
PartitionDesc pd = new PartitionDesc(td, null);
LinkedHashMap<String, PartitionDesc> pathToPartitionInfo =
new LinkedHashMap<String, PartitionDesc>();
- pathToPartitionInfo.put("/testDir", pd);
+ pathToPartitionInfo.put("hdfs:///testDir", pd);
// initialize aliasToWork
CollectDesc cd = new CollectDesc(Integer.valueOf(1));
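The test keys gain an explicit scheme because MapOperator no longer normalizes plan paths before matching them against the input file, so the comparison has to agree on scheme and authority as well as path. java.net.URI.relativize shows the effect (illustrative values, not the exact matching code in MapOperator):

    java.net.URI dir  = java.net.URI.create("hdfs:///testDir/");
    java.net.URI file = java.net.URI.create("hdfs:///testDir/part-00000");
    dir.relativize(file);                                // "part-00000" -> match
    java.net.URI.create("/testDir/").relativize(file);   // returned unchanged -> no match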
diff --git ql/src/test/queries/clientpositive/schemeAuthority.q ql/src/test/queries/clientpositive/schemeAuthority.q
new file mode 100644
index 0000000..22d6c37
--- /dev/null
+++ ql/src/test/queries/clientpositive/schemeAuthority.q
@@ -0,0 +1,15 @@
+dfs -mkdir file:///tmp/test;
+dfs -mkdir hdfs:///tmp/test;
+
+create external table dynPart (key string) partitioned by (value string) row format delimited fields terminated by '\\t' stored as textfile;
+insert overwrite local directory "/tmp/test" select key from src where (key = 10) order by key;
+insert overwrite directory "/tmp/test" select key from src where (key = 20) order by key;
+alter table dynPart add partition (value='0') location 'file:///tmp/test';
+alter table dynPart add partition (value='1') location 'hdfs:///tmp/test';
+select count(*) from dynPart;
+select key from dynPart;
+select key from src where (key = 10) order by key;
+select key from src where (key = 20) order by key;
+
+dfs -rmr file:///tmp/test;
+dfs -rmr hdfs:///tmp/test;
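The new test exercises both sides of the change: one external partition sits on the local filesystem (file:///tmp/test) and one on HDFS (hdfs:///tmp/test), each loaded with a single row. The expected output that follows confirms both partitions are read back: count(*) returns 2, and both per-partition keys (20 and 10) appear.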
diff --git ql/src/test/results/clientpositive/schemeAuthority.q.out ql/src/test/results/clientpositive/schemeAuthority.q.out
new file mode 100644
index 0000000..eabee19
--- /dev/null
+++ ql/src/test/results/clientpositive/schemeAuthority.q.out
@@ -0,0 +1,77 @@
+PREHOOK: query: create external table dynPart (key string) partitioned by (value string) row format delimited fields terminated by '\\t' stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table dynPart (key string) partitioned by (value string) row format delimited fields terminated by '\\t' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dynPart
+#### A masked pattern was here ####
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@dynpart
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@dynpart
+POSTHOOK: Output: default@dynpart@value=0
+#### A masked pattern was here ####
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@dynpart
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@dynpart
+POSTHOOK: Output: default@dynpart@value=1
+PREHOOK: query: select count(*) from dynPart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dynpart
+PREHOOK: Input: default@dynpart@value=0
+PREHOOK: Input: default@dynpart@value=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from dynPart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dynpart
+POSTHOOK: Input: default@dynpart@value=0
+POSTHOOK: Input: default@dynpart@value=1
+#### A masked pattern was here ####
+2
+PREHOOK: query: select key from dynPart
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dynpart
+PREHOOK: Input: default@dynpart@value=0
+PREHOOK: Input: default@dynpart@value=1
+#### A masked pattern was here ####
+POSTHOOK: query: select key from dynPart
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dynpart
+POSTHOOK: Input: default@dynpart@value=0
+POSTHOOK: Input: default@dynpart@value=1
+#### A masked pattern was here ####
+20
+10
+PREHOOK: query: select key from src where (key = 10) order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key from src where (key = 10) order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10
+PREHOOK: query: select key from src where (key = 20) order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key from src where (key = 20) order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+20
+#### A masked pattern was here ####