diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index 19f5bde..87a0c5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -20,6 +20,7 @@
 
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Map;
 
 import org.slf4j.Logger;
@@ -30,6 +31,7 @@
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -74,7 +76,7 @@ public CommandProcessorResponse run(String command) {
         }
       }).substitute(ss.getConf(), command);
 
-      String[] tokens = command.split("\\s+");
+      String[] tokens = splitCmd(command);
       CommandProcessorResponse authErrResp =
           CommandUtil.authorizeCommand(ss, HiveOperationType.DFS, Arrays.asList(tokens));
       if(authErrResp != null){
@@ -104,4 +106,76 @@ public CommandProcessorResponse run(String command) {
     }
   }
 
+
+  /**
+   * Splits a dfs command into its argument tokens, honoring single- and
+   * double-quoted arguments so that a quoted path may contain spaces.
+   * Quote characters are stripped from the returned tokens.
+   *
+   * NOTE(review): an unquoted prefix written directly against an opening
+   * quote (e.g. -Dkey="v") is discarded because the token start is reset
+   * when the quote opens; confirm whether such inputs must be supported.
+   *
+   * @param command the variable-substituted dfs command text to split
+   * @return the individual tokens of the command
+   * @throws CommandNeedRetryException if a quote is left unterminated
+   */
+  private String[] splitCmd(String command) throws CommandNeedRetryException {
+
+    ArrayList<String> paras = new ArrayList<String>();
+    int cmdLng = command.length();
+    char quote = 0; // currently open quote character, 0 when outside quotes
+    int start = 0;  // start index of the token currently being scanned
+
+    for (int i = 0; i < cmdLng; i++) {
+      char c = command.charAt(i);
+
+      switch (c) {
+        case ' ':
+          // a space outside quotes terminates the current token
+          if (quote == 0) {
+            String str = command.substring(start, i).trim();
+            if (!str.equals("")) {
+              paras.add(str);
+              start = i + 1;
+            }
+          }
+          break;
+        case '"':
+          if (quote == 0) {
+            quote = c;
+            start = i + 1;
+          } else if (quote == '"') {
+            paras.add(command.substring(start, i).trim());
+            quote = 0;
+            start = i + 1;
+          }
+          break;
+        case '\'':
+          if (quote == 0) {
+            quote = c;
+            start = i + 1;
+          } else if (quote == '\'') {
+            paras.add(command.substring(start, i).trim());
+            quote = 0;
+            start = i + 1;
+          }
+          break;
+        default:
+          // the final character flushes any trailing unquoted token
+          if (i == cmdLng - 1 && start < cmdLng) {
+            paras.add(command.substring(start, cmdLng).trim());
+          }
+          break;
+      }
+    }
+
+    // a quote that was opened but never closed is a syntax error
+    if (quote != 0) {
+      console.printError("Syntax error on hadoop options: dfs " + command);
+      throw new CommandNeedRetryException();
+    }
+
+    return paras.toArray(new String[paras.size()]);
+  }
 
 }
diff --git a/ql/src/test/queries/clientpositive/dfscmd.q b/ql/src/test/queries/clientpositive/dfscmd.q
new file mode 100644
index 0000000..d4b31e3
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/dfscmd.q
@@ -0,0 +1,6 @@
+dfs -mkdir "hello";
+dfs -mkdir 'world';
+dfs -mkdir "bei jing";
+dfs -rmr 'hello';
+dfs -rmr "world";
+dfs -rmr 'bei jing';
diff --git a/ql/src/test/results/clientpositive/dfscmd.q.out b/ql/src/test/results/clientpositive/dfscmd.q.out
new file mode 100644
index 0000000..0b8182a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/dfscmd.q.out
@@ -0,0 +1 @@
+#### A masked pattern was here ####