diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java index 3cd2a8b..d8e44de 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -87,7 +87,13 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.io.IOUtils; +import org.apache.hive.beeline.cli.CliOptionsProcessor; + +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; /** * A console SQL shell with command completion. @@ -126,8 +132,11 @@ private ConsoleReader consoleReader; private List batch = null; private final Reflector reflector; + private String dbName = null; + private String currentDatabase = null; private History history; + private boolean isBeeLine = true; private static final Options options = new Options(); @@ -401,10 +410,8 @@ String getManifestAttribute(String name) { String getApplicationTitle() { Package pack = BeeLine.class.getPackage(); - return loc("app-introduction", new Object[] { - "Beeline", - pack.getImplementationVersion() == null ? "???" - : pack.getImplementationVersion(), + return loc("app-introduction", new Object[] { "Beeline", + pack.getImplementationVersion() == null ? "???" 
: pack.getImplementationVersion(), "Apache Hive", // getManifestAttribute ("Specification-Title"), // getManifestAttribute ("Implementation-Version"), @@ -491,10 +498,14 @@ public static void mainWithInputRedirection(String[] args, InputStream inputStre public BeeLine() { + this(true); + } + + public BeeLine(boolean isBeeLine) { beeLineCommandCompleter = new BeeLineCommandCompleter(BeeLineCommandCompleter.getCompleters (this)); reflector = new Reflector(this); - + this.isBeeLine = isBeeLine; // attempt to dynamically load signal handler /* TODO disable signal handler try { @@ -508,7 +519,6 @@ public BeeLine() { */ } - DatabaseConnection getDatabaseConnection() { return getDatabaseConnections().current(); } @@ -633,7 +643,68 @@ protected void processOption(final String arg, final ListIterator iter) throws super.processOption(arg, iter); } } + } + + int initArgsFromCliVars(String[] args) { + List commands = Collections.emptyList(); + + CliOptionsProcessor optionsProcessor = new CliOptionsProcessor(); + if (!optionsProcessor.process(args)) { + return 1; + } + CommandLine commandLine = optionsProcessor.getCommandLine(); + + + Properties confProps = commandLine.getOptionProperties("hiveconf"); + for (String propKey : confProps.stringPropertyNames()) { + getOpts().getHiveConfVariables().put(propKey, confProps.getProperty(propKey)); + } + + Properties hiveVars = commandLine.getOptionProperties("define"); + for (String propKey : hiveVars.stringPropertyNames()) { + getOpts().getHiveConfVariables().put(propKey, hiveVars.getProperty(propKey)); + } + Properties hiveVars2 = commandLine.getOptionProperties("hivevar"); + for (String propKey : hiveVars2.stringPropertyNames()) { + getOpts().getHiveConfVariables().put(propKey, hiveVars2.getProperty(propKey)); + } + + getOpts().setScriptFile(commandLine.getOptionValue("f")); + + if (commandLine.getOptionValues("i") != null) { + getOpts().setInitFiles(commandLine.getOptionValues("i")); + } + + dbName = 
commandLine.getOptionValue("database"); + getOpts().setVerbose(Boolean.valueOf(commandLine.getOptionValue("verbose"))); + getOpts().setSilent(Boolean.valueOf(commandLine.getOptionValue("slient"))); + + int code = 0; + if (commandLine.getOptionValues("e") != null) { + commands = Arrays.asList(commandLine.getOptionValues("e")); + } + + if (!commands.isEmpty() && getOpts().getScriptFile() != null) { + System.err.println("The '-e' and '-f' options cannot be specified simultaneously"); + optionsProcessor.printCliUsage(); + return 1; + } + + if (!commands.isEmpty()) { + embeddedConnect(); + connectDBInEmbededMode(); + updateOptsForCli(); + for (Iterator i = commands.iterator(); i.hasNext(); ) { + String command = i.next().toString(); + debug(loc("executing-command", command)); + if (!dispatch(command)) { + code++; + } + } + exit = true; // execute and exit + } + return code; } int initArgs(String[] args) { @@ -681,13 +752,12 @@ int initArgs(String[] args) { pass = cl.getOptionValue("p"); } url = cl.getOptionValue("u"); - getOpts().setInitFile(cl.getOptionValue("i")); + getOpts().setInitFiles(cl.getOptionValues("i")); getOpts().setScriptFile(cl.getOptionValue("f")); if (cl.getOptionValues('e') != null) { commands = Arrays.asList(cl.getOptionValues('e')); } - // TODO: temporary disable this for easier debugging /* if (url == null) { @@ -741,6 +811,14 @@ private String obtainPasswordFromFile(String passwordFilePath) { } } + private void updateOptsForCli() { + getOpts().updateBeeLineOptsFromConf(); + getOpts().setShowHeader(false); + getOpts().setOutputFormat("dsv"); + getOpts().setDelimiterForDSV(' '); + getOpts().setNullEmptyString(true); + } + /** * Start accepting input from stdin, and dispatch it * to the appropriate {@link CommandHandler} until the @@ -755,9 +833,20 @@ public int begin(String[] args, InputStream inputStream) throws IOException { } try { - int code = initArgs(args); - if (code != 0) { - return code; + if (isBeeLine) { + int code = initArgs(args); + if 
(code != 0) { + return code; + } + } else { + int code = initArgsFromCliVars(args); + if (code != 0 || exit) { + return code; + } + defaultConnect(false); + updateOptsForCli(); + + processInitFiles(opts.getInitFiles()); } if (getOpts().getScriptFile() != null) { @@ -776,22 +865,64 @@ public int begin(String[] args, InputStream inputStream) throws IOException { } int runInit() { - String initFile = getOpts().getInitFile(); - if (initFile != null) { - info("Running init script " + initFile); - try { - return executeFile(initFile); - } finally { - exit = false; + String initFiles[] = getOpts().getInitFiles(); + if (initFiles != null && initFiles.length != 0) { + for (String initFile : initFiles) { + info("Running init script " + initFile); + try { + return executeFile(initFile); + } finally { + exit = false; + } } } return ERRNO_OK; } + private int embeddedConnect() { + if (!execCommandWithPrefix("!connect " + BEELINE_DEFAULT_JDBC_URL + " '' ''")) { + return ERRNO_OTHER; + } else { + return ERRNO_OK; + } + } + + private int connectDBInEmbededMode() { + if (dbName != null && !dbName.isEmpty()) { + if (!dispatch("use " + dbName + ";")) { + return ERRNO_OTHER; + } + } + return ERRNO_OK; + } + + public int defaultConnect(boolean exitOnError) { + if (embeddedConnect() != ERRNO_OK && exitOnError) { + return ERRNO_OTHER; + } + if (connectDBInEmbededMode() != ERRNO_OK && exitOnError) { + return ERRNO_OTHER; + } + return ERRNO_OK; + } + private int executeFile(String fileName) { - FileInputStream initStream = null; + InputStream initStream = null; try { - initStream = new FileInputStream(fileName); + if (!isBeeLine) { + org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(fileName); + FileSystem fs; + HiveConf conf = getCommands().getHiveConf(true); + if (!path.toUri().isAbsolute()) { + fs = FileSystem.getLocal(conf); + path = fs.makeQualified(path); + } else { + fs = FileSystem.get(path.toUri(), conf); + } + initStream = fs.open(path); + } else { + initStream = new 
FileInputStream(fileName); + } return execute(getConsoleReader(initStream), !getOpts().getForce()); } catch (Throwable t) { handleException(t); @@ -803,14 +934,35 @@ private int executeFile(String fileName) { } } + /** + * Only initial files specified by i option will be executed. The hiverc file will be processed by session manager. + * + * @param files + * @throws IOException + */ + public void processInitFiles(String[] files) throws IOException { + if (files == null || files.length == 0) { + return; + } + for (String initFile : files) { + int rc = executeFile(initFile); + if (rc != 0) { + System.exit(rc); + } + } + } + private int execute(ConsoleReader reader, boolean exitOnError) { String line; while (!exit) { try { // Execute one instruction; terminate on executing a script if there is an error // in silent mode, prevent the query and prompt being echoed back to terminal - line = (getOpts().isSilent() && getOpts().getScriptFile() != null) ? - reader.readLine(null, ConsoleReader.NULL_MASK) : reader.readLine(getPrompt()); + line = (getOpts().isSilent() && getOpts().getScriptFile() != null) ? reader + .readLine(null, ConsoleReader.NULL_MASK) : reader.readLine(getPrompt()); + + // trim line + line = (line == null) ? null : line.trim(); if (!dispatch(line) && exitOnError) { return ERRNO_OTHER; @@ -870,7 +1022,7 @@ public ConsoleReader getConsoleReader(InputStream inputStream) throws IOExceptio handleException(e); } - if (inputStream instanceof FileInputStream) { + if (inputStream instanceof FileInputStream || inputStream instanceof FSDataInputStream) { // from script.. no need to load history and no need of completer, either return consoleReader; } @@ -908,11 +1060,38 @@ public void run() { return consoleReader; } - void usage() { output(loc("cmd-usage")); } + /** + * This method is used for executing commands beginning with ! 
+ * @param line + * @return + */ + public boolean execCommandWithPrefix(String line) { + Map cmdMap = new TreeMap(); + line = line.substring(1); + for (int i = 0; i < commandHandlers.length; i++) { + String match = commandHandlers[i].matches(line); + if (match != null) { + cmdMap.put(match, commandHandlers[i]); + } + } + + if (cmdMap.size() == 0) { + return error(loc("unknown-command", line)); + } + if (cmdMap.size() > 1) { + // any exact match? + CommandHandler handler = cmdMap.get(line); + if (handler == null) { + return error(loc("multiple-matches", cmdMap.keySet().toString())); + } + return handler.execute(line); + } + return cmdMap.values().iterator().next().execute(line); + } /** * Dispatch the specified line to the appropriate {@link CommandHandler}. @@ -947,33 +1126,13 @@ boolean dispatch(String line) { line = "!help"; } - if (line.startsWith(COMMAND_PREFIX)) { - Map cmdMap = new TreeMap(); - line = line.substring(1); - for (int i = 0; i < commandHandlers.length; i++) { - String match = commandHandlers[i].matches(line); - if (match != null) { - CommandHandler prev = cmdMap.put(match, commandHandlers[i]); - if (prev != null) { - return error(loc("multiple-matches", - Arrays.asList(prev.getName(), commandHandlers[i].getName()))); - } - } - } - - if (cmdMap.size() == 0) { - return error(loc("unknown-command", line)); - } - if (cmdMap.size() > 1) { - // any exact match? 
- CommandHandler handler = cmdMap.get(line); - if (handler == null) { - return error(loc("multiple-matches", cmdMap.keySet().toString())); - } - return handler.execute(line); + if (isBeeLine) { + if (line.startsWith(COMMAND_PREFIX) && !line.contains(";")) { + // handle the case "!cmd" for beeline + return execCommandWithPrefix(line); + } else { + return commands.sql(line, getOpts().getEntireLineAsCommand()); } - return cmdMap.values().iterator().next() - .execute(line); } else { return commands.sql(line, getOpts().getEntireLineAsCommand()); } @@ -1226,20 +1385,55 @@ void showWarnings(SQLWarning warn) { } } - String getPrompt() { + if (isBeeLine) { + return getPromptForBeeline(); + } else { + return getPromptForCli(); + } + } + + String getPromptForCli() { + String prompt; + // read prompt configuration and substitute variables. + HiveConf conf = getCommands().getHiveConf(true); + prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT); + prompt = getCommands().substituteVariables(conf, prompt); + return prompt + getFormattedDb(conf) + "> "; + } + + /** + * Retrieve the current database name string to display, based on the + * configuration value. + * + * @param conf storing whether or not to show current db + * @return String to show user for current db value + */ + String getFormattedDb(HiveConf conf) { + if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB)) { + return ""; + } + String currDb = getCurrentDatabase(); + + if (currDb == null) { + return ""; + } + + return " (" + currDb + ")"; + } + + String getPromptForBeeline() { if (getDatabaseConnection() == null || getDatabaseConnection().getUrl() == null) { return "beeline> "; } else { String printClosed = getDatabaseConnection().isClosed() ? " (closed)" : ""; - String url = getOpts().getShowConnectedUrl() ? 
getDatabaseConnection().getConnectedUrl() - : getDatabaseConnection().getUrl(); - return getPrompt(getDatabaseConnections().getIndex() + ": " + url) + printClosed + "> "; + return getPromptForBeeline(getDatabaseConnections().getIndex() + + ": " + getDatabaseConnection().getUrl()) + printClosed + "> "; } } - static String getPrompt(String url) { + static String getPromptForBeeline(String url) { if (url == null || url.length() == 0) { url = "beeline"; } @@ -1965,4 +2159,23 @@ void setBatch(List batch) { protected Reflector getReflector() { return reflector; } + + public boolean isBeeLine() { + return isBeeLine; + } + + public void setBeeLine(boolean isBeeLine) { + this.isBeeLine = isBeeLine; + } + + public String getCurrentDatabase() { + if (currentDatabase == null) { + currentDatabase = DEFAULT_DATABASE_NAME; + } + return currentDatabase; + } + + public void setCurrentDatabase(String currentDatabase) { + this.currentDatabase = currentDatabase; + } } diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java index 3388391..8e1d11b 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLineOpts.java @@ -42,6 +42,7 @@ import jline.TerminalFactory; import jline.console.completer.Completer; import jline.console.completer.StringsCompleter; +import org.apache.hadoop.hive.conf.HiveConf; class BeeLineOpts implements Completer { public static final int DEFAULT_MAX_WIDTH = 80; @@ -78,6 +79,8 @@ int timeout = -1; private String isolation = DEFAULT_ISOLATION_LEVEL; private String outputFormat = "table"; + // This configuration is used only for client side configuration. 
+ private HiveConf conf; private boolean trimScripts = true; private boolean allowMultiLineCommand = true; private boolean showConnectedUrl = false; @@ -91,7 +94,7 @@ private String historyFile = new File(saveDir(), "history").getAbsolutePath(); private String scriptFile = null; - private String initFile = null; + private String[] initFiles = null; private String authType = null; private char delimiterForDSV = DEFAULT_DELIMITER_FOR_DSV; @@ -115,7 +118,7 @@ public BeeLineOpts(BeeLine beeLine, Properties props) { public String[] possibleSettingValues() { List vals = new LinkedList(); - vals.addAll(Arrays.asList(new String[] {"yes", "no"})); + vals.addAll(Arrays.asList(new String[] { "yes", "no" })); return vals.toArray(new String[vals.size()]); } @@ -220,6 +223,21 @@ public void load(InputStream fin) throws IOException { loadProperties(p); } + /** + * Update the options after connection is established in CLI mode. + */ + public void updateBeeLineOptsFromConf() { + if (!beeLine.isBeeLine()) { + if (conf == null) { + conf = beeLine.getCommands().getHiveConf(false); + } + setForce(HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS)); + } + } + + public void setHiveConf(HiveConf conf) { + this.conf = conf; + } public void loadProperties(Properties props) { for (Object element : props.keySet()) { @@ -371,12 +389,12 @@ public String getScriptFile() { return scriptFile; } - public String getInitFile() { - return initFile; + public String[] getInitFiles() { + return initFiles; } - public void setInitFile(String initFile) { - this.initFile = initFile; + public void setInitFiles(String[] initFiles) { + this.initFiles = initFiles; } public void setColor(boolean color) { @@ -392,7 +410,14 @@ public void setShowHeader(boolean showHeader) { } public boolean getShowHeader() { - return showHeader; + if (beeLine.isBeeLine()) { + return showHeader; + } else { + boolean header; + HiveConf conf = beeLine.getCommands().getHiveConf(true); + header = HiveConf.getBoolVar(conf, 
HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER); + return header; + } } public void setHeaderInterval(int headerInterval) { @@ -529,5 +554,9 @@ public char getDelimiterForDSV() { public void setDelimiterForDSV(char delimiterForDSV) { this.delimiterForDSV = delimiterForDSV; } + + public HiveConf getConf() { + return conf; + } } diff --git a/beeline/src/java/org/apache/hive/beeline/ClientCommandHookFactory.java b/beeline/src/java/org/apache/hive/beeline/ClientCommandHookFactory.java new file mode 100644 index 0000000..c4d97bc --- /dev/null +++ b/beeline/src/java/org/apache/hive/beeline/ClientCommandHookFactory.java @@ -0,0 +1,85 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline; + +/** + * We need to update some client side information after executing some Hive Commands + */ +public class ClientCommandHookFactory { + private final static ClientCommandHookFactory instance = new ClientCommandHookFactory(); + + private ClientCommandHookFactory() { + } + + public static ClientCommandHookFactory get() { + return instance; + } + + public class SetCommandHook extends ClientHook { + + public SetCommandHook(String sql) { + super(sql); + } + + @Override + public void postHook(BeeLine beeLine) { + if (!beeLine.isBeeLine()) { + beeLine.getOpts().setHiveConf(beeLine.getCommands().getHiveConf(false)); + } + } + } + + public class UseCommandHook extends ClientHook { + + public UseCommandHook(String sql) { + super(sql); + } + + @Override + public void postHook(BeeLine beeLine) { + if (!beeLine.isBeeLine()) { + // Handler multi-line sql + String line = sql.replaceAll("\\s+", " "); + String strs[] = line.split(" "); + String dbName; + if (strs == null || strs.length != 2) { + // unable to parse the use command + dbName = ""; + } else { + dbName = strs[1]; + } + beeLine.setCurrentDatabase(dbName); + } + } + } + + public ClientHook getHook(String cmdLine) { + if (cmdLine.toLowerCase().startsWith("set")) { + // Only set A = B command needs updating the configuration stored in client side. 
+ if (cmdLine.contains("=")) { + return new SetCommandHook(cmdLine); + } else { + return null; + } + } else if (cmdLine.toLowerCase().startsWith("use")) { + return new UseCommandHook(cmdLine); + } else { + return null; + } + } +} diff --git a/beeline/src/java/org/apache/hive/beeline/ClientHook.java b/beeline/src/java/org/apache/hive/beeline/ClientHook.java new file mode 100644 index 0000000..3de6def --- /dev/null +++ b/beeline/src/java/org/apache/hive/beeline/ClientHook.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline; + +/** + * This is the client's hook and used for new Hive CLI. For some configurations like + * set and use, it may change some prompt information in the client side. So the hook + * will be executed after some of the commands are used. + */ +public abstract class ClientHook { + protected String sql; + + public ClientHook(String sql) { + this.sql = sql; + } + + abstract void postHook(BeeLine beeLine); +} diff --git a/beeline/src/java/org/apache/hive/beeline/Commands.java b/beeline/src/java/org/apache/hive/beeline/Commands.java index 3cdcfb8..d16b4ec 100644 --- a/beeline/src/java/org/apache/hive/beeline/Commands.java +++ b/beeline/src/java/org/apache/hive/beeline/Commands.java @@ -22,6 +22,10 @@ */ package org.apache.hive.beeline; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.SystemVariables; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.io.IOUtils; import java.io.BufferedReader; @@ -32,7 +36,6 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.lang.reflect.Method; -import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.sql.CallableStatement; @@ -45,9 +48,11 @@ import java.sql.SQLWarning; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; @@ -220,9 +225,8 @@ public boolean primarykeys(String line) 
throws Exception { public boolean exportedkeys(String line) throws Exception { - return metadata("getExportedKeys", new String[] { - beeLine.getConnection().getCatalog(), null, - arg1(line, "table name"),}); + return metadata("getExportedKeys", + new String[] { beeLine.getConnection().getCatalog(), null, arg1(line, "table name"), }); } @@ -711,10 +715,351 @@ public boolean sql(String line) { return execute(line, false, false); } + /** + * This method is used for retrieving the latest configuration from hive server2. + * It uses the set command processor. + * + * @return + */ + private Map getHiveVariables() { + Map result = new HashMap<>(); + BufferedRows rows = getConfInternal(true); + while (rows.hasNext()) { + Rows.Row row = (Rows.Row) rows.next(); + if (!row.isMeta) { + result.put(row.values[0], row.values[1]); + } + } + return result; + } + + /** + * This method should only be used in CLI mode. + * + * @return the hive configuration from server side + */ + public HiveConf getHiveConf(boolean call) { + HiveConf hiveConf = beeLine.getOpts().getConf(); + if (hiveConf != null && call) { + return hiveConf; + } else { + return getHiveConfHelper(call); + } + } + + public HiveConf getHiveConfHelper(boolean call) { + HiveConf conf = new HiveConf(); + BufferedRows rows = getConfInternal(call); + while (rows != null && rows.hasNext()) { + addConf((Rows.Row) rows.next(), conf); + } + return conf; + } + + /** + * Use call statement to retrieve the configurations for substitution and sql for the substitution. 
+ * + * @param call + * @return + */ + private BufferedRows getConfInternal(boolean call) { + Statement stmnt = null; + BufferedRows rows = null; + try { + boolean hasResults; + if (call) { + stmnt = beeLine.getDatabaseConnection().getConnection().prepareCall("set"); + hasResults = ((CallableStatement) stmnt).execute(); + } else { + stmnt = beeLine.createStatement(); + hasResults = stmnt.execute("set"); + } + if (hasResults) { + ResultSet rs = stmnt.getResultSet(); + rows = new BufferedRows(beeLine, rs); + } + } catch (SQLException e) { + beeLine.error(e); + } finally { + if (stmnt != null) { + try { + stmnt.close(); + } catch (SQLException e1) { + beeLine.error(e1); + } + } + } + return rows; + } + + private void addConf(Rows.Row r, HiveConf hiveConf) { + if (r.isMeta) { + return; + } + if (r.values == null || r.values[0] == null || r.values[0].isEmpty()) { + return; + } + String val = r.values[0]; + if (r.values[0].startsWith(SystemVariables.SYSTEM_PREFIX) || r.values[0] + .startsWith(SystemVariables.ENV_PREFIX)) { + return; + } else { + String[] kv = val.split("=", 2); + hiveConf.set(kv[0], kv[1]); + } + } + + /** + * Extract and clean up the first command in the input. 
+ */ + private String getFirstCmd(String cmd, int length) { + return cmd.substring(length).trim(); + } + + private String[] tokenizeCmd(String cmd) { + return cmd.split("\\s+"); + } + + private boolean isSourceCMD(String cmd) { + if (cmd == null || cmd.isEmpty()) + return false; + String[] tokens = tokenizeCmd(cmd); + return tokens[0].equalsIgnoreCase("source"); + } + + private boolean sourceFile(String cmd) { + String[] tokens = tokenizeCmd(cmd); + String cmd_1 = getFirstCmd(cmd, tokens[0].length()); + + cmd_1 = substituteVariables(getHiveConf(false), cmd_1); + File sourceFile = new File(cmd_1); + if (!sourceFile.isFile()) { + return false; + } else { + boolean ret; + try { + ret = sourceFileInternal(sourceFile); + } catch (IOException e) { + beeLine.error(e); + return false; + } + return ret; + } + } + + private boolean sourceFileInternal(File sourceFile) throws IOException { + BufferedReader reader = null; + try { + reader = new BufferedReader(new FileReader(sourceFile)); + String extra = reader.readLine(); + String lines = null; + while (extra != null) { + if (beeLine.isComment(extra)) { + continue; + } + if (lines == null) { + lines = extra; + } else { + lines += "\n" + extra; + } + extra = reader.readLine(); + } + String[] cmds = lines.split(";"); + for (String c : cmds) { + c = c.trim(); + if (!executeInternal(c, false)) { + return false; + } + } + } finally { + if (reader != null) { + reader.close(); + } + } + return true; + } + + public String cliToBeelineCmd(String cmd) { + if (cmd == null) + return null; + if (cmd.toLowerCase().equals("quit") || cmd.toLowerCase().equals("exit")) { + return BeeLine.COMMAND_PREFIX + cmd; + } else if (cmd.startsWith("!")) { + String shell_cmd = cmd.substring(1); + return "!sh " + shell_cmd; + } else { // local mode + // command like dfs + return cmd; + } + } + + // Return false only occurred error when execution the sql and the sql should follow the rules + // of beeline. 
+ private boolean executeInternal(String sql, boolean call) { + if (!beeLine.isBeeLine()) { + sql = cliToBeelineCmd(sql); + } + + if (sql == null || sql.length() == 0) { + return true; + } + + if (beeLine.isComment(sql)) { + //skip this and rest cmds in the line + return true; + } + + // is source CMD + if (isSourceCMD(sql)) { + return sourceFile(sql); + } + + if (sql.startsWith(BeeLine.COMMAND_PREFIX)) { + return beeLine.execCommandWithPrefix(sql); + } + + String prefix = call ? "call" : "sql"; + + if (sql.startsWith(prefix)) { + sql = sql.substring(prefix.length()); + } + + // batch statements? + if (beeLine.getBatch() != null) { + beeLine.getBatch().add(sql); + return true; + } + + if (!(beeLine.assertConnection())) { + return false; + } + + ClientHook hook = null; + if (!beeLine.isBeeLine()) { + hook = ClientCommandHookFactory.get().getHook(sql); + } + + try { + Statement stmnt = null; + boolean hasResults; + Thread logThread = null; + + try { + long start = System.currentTimeMillis(); + + if (call) { + stmnt = beeLine.getDatabaseConnection().getConnection().prepareCall(sql); + hasResults = ((CallableStatement) stmnt).execute(); + } else { + stmnt = beeLine.createStatement(); + if (beeLine.getOpts().isSilent()) { + hasResults = stmnt.execute(sql); + } else { + logThread = new Thread(createLogRunnable(stmnt)); + logThread.setDaemon(true); + logThread.start(); + hasResults = stmnt.execute(sql); + logThread.interrupt(); + logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); + } + } + + beeLine.showWarnings(); + + if (hasResults) { + do { + ResultSet rs = stmnt.getResultSet(); + try { + int count = beeLine.print(rs); + long end = System.currentTimeMillis(); + + beeLine.info( + beeLine.loc("rows-selected", count) + " " + beeLine.locElapsedTime(end - start)); + } finally { + if (logThread != null) { + logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); + showRemainingLogsIfAny(stmnt); + logThread = null; + } + rs.close(); + } + } while 
(BeeLine.getMoreResults(stmnt)); + } else { + int count = stmnt.getUpdateCount(); + long end = System.currentTimeMillis(); + beeLine.info( + beeLine.loc("rows-affected", count) + " " + beeLine.locElapsedTime(end - start)); + } + } finally { + if (logThread != null) { + if (!logThread.isInterrupted()) { + logThread.interrupt(); + } + logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); + showRemainingLogsIfAny(stmnt); + } + if (stmnt != null) { + stmnt.close(); + } + } + } catch (Exception e) { + return beeLine.error(e); + } + beeLine.showWarnings(); + if (hook != null) { + hook.postHook(beeLine); + } + return true; + } + + public String handleMultiLineCmd(String line) throws IOException { + //When using -e, console reader is not initialized and command is a single line + while (beeLine.getConsoleReader() != null && !(line.trim().endsWith(";")) && beeLine.getOpts() + .isAllowMultiLineCommand()) { + + if (!beeLine.getOpts().isSilent()) { + StringBuilder prompt = new StringBuilder(beeLine.getPrompt()); + for (int i = 0; i < prompt.length() - 1; i++) { + if (prompt.charAt(i) != '>') { + prompt.setCharAt(i, i % 2 == 0 ? '.' : ' '); + } + } + } + + String extra; + if (beeLine.getOpts().isSilent() && beeLine.getOpts().getScriptFile() != null) { + extra = beeLine.getConsoleReader().readLine(null, jline.console.ConsoleReader.NULL_MASK); + } else { + extra = beeLine.getConsoleReader().readLine(beeLine.getPrompt()); + } + + if (extra == null) { //it happens when using -f and the line of cmds does not end with ; + break; + } + if (!beeLine.isComment(extra)) { + line += "\n" + extra; + } + } + return line; + } + public boolean sql(String line, boolean entireLineAsCommand) { return execute(line, false, entireLineAsCommand); } + public String substituteVariables(HiveConf conf, String line) { + if (!beeLine.isBeeLine()) { + // Substitution is only supported in non-beeline mode. 
+ return new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return getHiveVariables(); + } + }).substitute(conf, line); + } + return line; + } + public boolean sh(String line) { if (line == null || line.length() == 0) { return false; @@ -725,9 +1070,7 @@ public boolean sh(String line) { } line = line.substring("sh".length()).trim(); - - // Support variable substitution. HIVE-6791. - // line = new VariableSubstitution().substitute(new HiveConf(BeeLine.class), line.trim()); + line = substituteVariables(getHiveConf(false), line.trim()); try { ShellCmdExecutor executor = new ShellCmdExecutor(line, beeLine.getOutputStream(), @@ -740,7 +1083,6 @@ public boolean sh(String line) { return true; } catch (Exception e) { beeLine.error("Exception raised from Shell command " + e); - beeLine.error(e); return false; } } @@ -762,41 +1104,11 @@ private boolean execute(String line, boolean call, boolean entireLineAsCommand) // use multiple lines for statements not terminated by ";" try { - //When using -e, console reader is not initialized and command is a single line - while (beeLine.getConsoleReader() != null && !(line.trim().endsWith(";")) - && beeLine.getOpts().isAllowMultiLineCommand()) { - - if (!beeLine.getOpts().isSilent()) { - StringBuilder prompt = new StringBuilder(beeLine.getPrompt()); - for (int i = 0; i < prompt.length() - 1; i++) { - if (prompt.charAt(i) != '>') { - prompt.setCharAt(i, i % 2 == 0 ? '.' 
: ' '); - } - } - } - - String extra = null; - if (beeLine.getOpts().isSilent() && beeLine.getOpts().getScriptFile() != null) { - extra = beeLine.getConsoleReader().readLine(null, jline.console.ConsoleReader.NULL_MASK); - } else { - extra = beeLine.getConsoleReader().readLine(beeLine.getPrompt()); - } - - if (extra == null) { //it happens when using -f and the line of cmds does not end with ; - break; - } - if (!beeLine.isComment(extra)) { - line += "\n" + extra; - } - } + line = handleMultiLineCmd(line); } catch (Exception e) { beeLine.handleException(e); } - if (!(beeLine.assertConnection())) { - return false; - } - line = line.trim(); List cmdList = new ArrayList(); if (entireLineAsCommand) { @@ -817,93 +1129,9 @@ private boolean execute(String line, boolean call, boolean entireLineAsCommand) for (int i = 0; i < cmdList.size(); i++) { String sql = cmdList.get(i).trim(); if (sql.length() != 0) { - if (beeLine.isComment(sql)) { - //skip this and rest cmds in the line - break; - } - if (sql.startsWith(BeeLine.COMMAND_PREFIX)) { - sql = sql.substring(1); - } - - String prefix = call ? "call" : "sql"; - - if (sql.startsWith(prefix)) { - sql = sql.substring(prefix.length()); - } - - // batch statements? 
- if (beeLine.getBatch() != null) { - beeLine.getBatch().add(sql); - continue; - } - - try { - Statement stmnt = null; - boolean hasResults; - Thread logThread = null; - - try { - long start = System.currentTimeMillis(); - - if (call) { - stmnt = beeLine.getDatabaseConnection().getConnection().prepareCall(sql); - hasResults = ((CallableStatement) stmnt).execute(); - } else { - stmnt = beeLine.createStatement(); - if (beeLine.getOpts().isSilent()) { - hasResults = stmnt.execute(sql); - } else { - logThread = new Thread(createLogRunnable(stmnt)); - logThread.setDaemon(true); - logThread.start(); - hasResults = stmnt.execute(sql); - logThread.interrupt(); - logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); - } - } - - beeLine.showWarnings(); - - if (hasResults) { - do { - ResultSet rs = stmnt.getResultSet(); - try { - int count = beeLine.print(rs); - long end = System.currentTimeMillis(); - - beeLine.info(beeLine.loc("rows-selected", count) + " " - + beeLine.locElapsedTime(end - start)); - } finally { - if (logThread != null) { - logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); - showRemainingLogsIfAny(stmnt); - logThread = null; - } - rs.close(); - } - } while (BeeLine.getMoreResults(stmnt)); - } else { - int count = stmnt.getUpdateCount(); - long end = System.currentTimeMillis(); - beeLine.info(beeLine.loc("rows-affected", count) - + " " + beeLine.locElapsedTime(end - start)); - } - } finally { - if (logThread != null) { - if (!logThread.isInterrupted()) { - logThread.interrupt(); - } - logThread.join(DEFAULT_QUERY_PROGRESS_THREAD_TIMEOUT); - showRemainingLogsIfAny(stmnt); - } - if (stmnt != null) { - stmnt.close(); - } - } - } catch (Exception e) { - return beeLine.error(e); + if (!executeInternal(sql, call)) { + return false; } - beeLine.showWarnings(); } } return true; @@ -972,6 +1200,9 @@ public boolean quit(String line) { return true; } + public boolean exit(String line) { + return quit(line); + } /** * Close all connections. 
@@ -1209,7 +1440,6 @@ public boolean list(String line) { return true; } - public boolean all(String line) { int index = beeLine.getDatabaseConnections().getIndex(); boolean success = true; diff --git a/beeline/src/java/org/apache/hive/beeline/cli/CliOptionsProcessor.java b/beeline/src/java/org/apache/hive/beeline/cli/CliOptionsProcessor.java new file mode 100644 index 0000000..61c5ab6 --- /dev/null +++ b/beeline/src/java/org/apache/hive/beeline/cli/CliOptionsProcessor.java @@ -0,0 +1,104 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hive.beeline.cli; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +/** + * This class is used for parsing the options of Hive Cli + */ +public class CliOptionsProcessor { + private final Options options = new Options(); + private org.apache.commons.cli.CommandLine commandLine; + + public CliOptionsProcessor() { + // -database database + options.addOption(OptionBuilder.hasArg().withArgName("databasename").withLongOpt("database") + .withDescription("Specify the database to use").create()); + + // -e 'quoted-query-string' + options.addOption(OptionBuilder.hasArg().withArgName("quoted-query-string").withDescription + ("SQL from command line").create('e')); + + // -f + options.addOption(OptionBuilder.hasArg().withArgName("filename").withDescription("SQL from " + + "files").create('f')); + + // -i + options.addOption(OptionBuilder.hasArg().withArgName("filename").withDescription + ("Initialization SQL file").create('i')); + + // -hiveconf x=y + options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value") + .withLongOpt("hiveconf").withDescription("Use value for given property").create()); + + // Substitution option -d, --define + options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("key=value") + .withLongOpt("define").withDescription("Variable subsitution to apply to hive commands. e" + + ".g. -d A=B or --define A=B").create('d')); + + // Substitution option --hivevar + options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("key=value") + .withLongOpt("hivevar").withDescription("Variable subsitution to apply to hive commands. " + + "e.g. 
--hivevar A=B").create()); + + // [-S|--silent] + options.addOption(new Option("S", "silent", false, "Silent mode in interactive shell")); + + // [-v|--verbose] + options.addOption(new Option("v", "verbose", false, "Verbose mode (echo executed SQL to the " + + "console)")); + + // [-H|--help] + options.addOption(new Option("H", "help", false, "Print help information")); + } + + public boolean process(String []argv){ + try { + commandLine = new GnuParser().parse(options, argv); + + if(commandLine.hasOption("help")){ + printCliUsage(); + return false; + } + } catch (ParseException e) { + System.err.println(e.getMessage()); + printCliUsage(); + return false; + } + return true; + } + + public void printCliUsage() { + new HelpFormatter().printHelp("hive", options); + } + + public CommandLine getCommandLine() { + return commandLine; + } + + public void setCommandLine(CommandLine commandLine) { + this.commandLine = commandLine; + } +} diff --git a/beeline/src/java/org/apache/hive/beeline/cli/HiveCli.java b/beeline/src/java/org/apache/hive/beeline/cli/HiveCli.java new file mode 100644 index 0000000..1e7f068 --- /dev/null +++ b/beeline/src/java/org/apache/hive/beeline/cli/HiveCli.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline.cli; + +import org.apache.hive.beeline.BeeLine; + +import java.io.IOException; +import java.io.InputStream; + +public class HiveCli { + private BeeLine beeLine; + + public static void main(String[] args) throws IOException { + int status = new HiveCli().runWithArgs(args, null); + System.exit(status); + } + + public int runWithArgs(String[] cmd, InputStream inputStream) throws IOException { + beeLine = new BeeLine(false); + return beeLine.begin(cmd, inputStream); + } +} diff --git a/beeline/src/test/org/apache/hive/beeline/TestClientCommandHookFactory.java b/beeline/src/test/org/apache/hive/beeline/TestClientCommandHookFactory.java new file mode 100644 index 0000000..c86de0a --- /dev/null +++ b/beeline/src/test/org/apache/hive/beeline/TestClientCommandHookFactory.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline; + +import junit.framework.Assert; +import org.junit.Test; + +public class TestClientCommandHookFactory { + @Test + public void testGetHook() { + Assert.assertNull(ClientCommandHookFactory.get().getHook("set a;")); + Assert.assertTrue(ClientCommandHookFactory.get() + .getHook("set a=b;") instanceof ClientCommandHookFactory.SetCommandHook); + Assert.assertTrue(ClientCommandHookFactory.get() + .getHook("USE a.b") instanceof ClientCommandHookFactory.UseCommandHook); + } +} diff --git a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java new file mode 100644 index 0000000..953ba5f --- /dev/null +++ b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java @@ -0,0 +1,289 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.beeline.cli; + +import junit.framework.Assert; +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.BufferedWriter; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintStream; + +public class TestHiveCli { + private static final Log LOG = LogFactory.getLog(TestHiveCli.class.getName()); + private static final int ERRNO_OK = 0; + private static final int ERRNO_ARGS = 1; + private static final int ERRNO_OTHER = 2; + + private final static String SOURCE_CONTEXT = + "create table if not exists test.testSrcTbl(sc1 string);"; + private final static String SOURCE_CONTEXT2 = + "create table if not exists test.testSrcTbl2(sc2 string);"; + private final static String SOURCE_CONTEXT3 = + "create table if not exists test.testSrcTbl3(sc3 string);"; + private final static String SOURCE_CONTEXT4 = "show tables;!ls;show tables;\nquit;"; + final static String CMD = + "create database if not exists test;\ncreate table if not exists test.testTbl(a string, b " + + "string);\n"; + private HiveCli cli; + private OutputStream os; + private PrintStream ps; + private OutputStream errS; + private PrintStream errPs; + private File tmp = null; + + private void executeCMD(String[] args, String input, int retCode) { + InputStream inputStream = null; + int ret = 0; + try { + if (input != null) { + inputStream = IOUtils.toInputStream(input); 
+ } + ret = cli.runWithArgs(args, inputStream); + } catch (Throwable e) { + LOG.error("Failed to execute command due to the error: " + e); + } finally { + if (retCode != ret) { + LOG.error("Failed due to the error:" + errS.toString()); + Assert.fail("Supported return code is " + retCode + " while the actual is " + ret); + } + } + } + + /** + * This method is used for verifying CMD to see whether the output contains the keywords provided. + * + * @param CMD + * @param keywords + * @param os + * @param options + * @param retCode + * @param contains + */ + private void verifyCMD(String CMD, String keywords, OutputStream os, String[] options, int retCode, + boolean contains) { + executeCMD(options, CMD, retCode); + String output = os.toString(); + LOG.debug(output); + if (contains) { + Assert.assertTrue("The expected keyword \"" + keywords + "\" occur in the output: " + output, + output.contains(keywords)); + } else { + Assert.assertFalse( + "The expected keyword \"" + keywords + "\" should be excluded occurred in the output: " + + output, output.contains(keywords)); + } + } + + @Test + public void testInValidCmd() { + verifyCMD("!lss\n", "Failed to execute lss", errS, null, ERRNO_OK, true); + } + + @Test + public void testCmd() { + verifyCMD("show tables;!ls;show tables;\n", "src", os, null, ERRNO_OK, true); + } + + @Test + public void testSetPromptValue() { + verifyCMD("set hive.cli.prompt=MYCLI;SHOW\nTABLES;", "MYCLI> ", os, null, + ERRNO_OK, true); + } + + @Test + public void testSetHeaderValue() { + verifyCMD( + "create database if not exists test;\ncreate table if not exists test.testTbl(a string, b string);\nset hive.cli.print.header=true;\n select * from test.testTbl;\n", + "testtbl.a testtbl.b", os, null, ERRNO_OK, true); + } + + @Test + public void testHelp() { + verifyCMD(null, "usage: hive", os, new String[] { "-H" }, ERRNO_ARGS, true); + } + + @Test + public void testInvalidDatabaseOptions() { + verifyCMD("\nshow tables;\nquit;\n", "Database does not 
exist: invalidDB", + errS, new String[] { "--database", "invalidDB" }, ERRNO_OK, true); + } + + @Test + public void testDatabaseOptions() { + verifyCMD("\nshow tables;\nquit;", "testtbl", os, + new String[] { "--database", "test" }, ERRNO_OK, true); + } + + @Test + public void testSourceCmd() { + File f = generateTmpFile(SOURCE_CONTEXT); + verifyCMD("source " + f.getPath() + ";" + "desc testSrcTbl;\nquit;\n", + "sc1", os, new String[] { "--database", "test" }, ERRNO_OK, true); + f.delete(); + } + + @Test + public void testSourceCmd2() { + File f = generateTmpFile(SOURCE_CONTEXT3); + verifyCMD("source " + f.getPath() + ";" + "desc testSrcTbl3;\nquit;\n", + "sc3", os, new String[] { "--database", "test" }, ERRNO_OK, true); + f.delete(); + } + + @Test + public void testSourceCmd3() { + File f = generateTmpFile(SOURCE_CONTEXT4); + verifyCMD("source " + f.getPath() + ";" + "desc testSrcTbl4;\nquit;\n", "src", os, + new String[] { "--database", "test" }, ERRNO_OK, true); + f.delete(); + } + + @Test + public void testSqlFromCmd() { + verifyCMD(null, "", os, new String[] { "-e", "show databases;" }, ERRNO_OK, true); + } + + @Test + public void testSqlFromCmdWithDBName() { + verifyCMD(null, "testtbl", os, + new String[] { "-e", "show tables;", "--database", "test" }, ERRNO_OK, true); + } + + @Test + public void testInvalidOptions() { + verifyCMD(null, + "The '-e' and '-f' options cannot be specified simultaneously", errS, + new String[] { "-e", "show tables;", "-f", "path/to/file" }, ERRNO_ARGS, true); + } + + @Test + public void testInvalidOptions2() { + verifyCMD(null, "Unrecognized option: -k", errS, new String[] { "-k" }, + ERRNO_ARGS, true); + } + + @Test + public void testVariables() { + verifyCMD( + "set system:xxx=5;\nset system:yyy=${system:xxx};\nset system:yyy;", "", os, null, ERRNO_OK, true); + } + + @Test + public void testVariablesForSource() { + File f = generateTmpFile(SOURCE_CONTEXT2); + verifyCMD( + "set hiveconf:zzz=" + f.getAbsolutePath() + ";\nsource 
${hiveconf:zzz};\ndesc testSrcTbl2;", + "sc2", os, new String[] { "--database", "test" }, ERRNO_OK, true); + f.delete(); + } + + @Test + public void testErrOutput() { + verifyCMD("show tables;set system:xxx=5;set system:yyy=${system:xxx};\nlss;", + "cannot recognize input near 'lss' '' ''", errS, null, ERRNO_OK, true); + } + + @Test + public void testUseCurrentDB1() { + verifyCMD( + "create database if not exists testDB; set hive.cli.print.current.db=true;use testDB;\n" + + "use default;drop if exists testDB;", "hive (testDB)>", os, null, ERRNO_OK, true); + } + + @Test + public void testUseCurrentDB2() { + verifyCMD( + "create database if not exists testDB; set hive.cli.print.current.db=true;use\ntestDB;\nuse default;drop if exists testDB;", + "hive (testDB)>", os, null, ERRNO_OK, true); + } + + @Test + public void testUseCurrentDB3() { + verifyCMD( + "create database if not exists testDB; set hive.cli.print.current.db=true;use testDB;\n" + + "use default;drop if exists testDB;", "hive (testDB)>", os, null, ERRNO_OK, true); + } + + @Test + public void testUseInvalidDB() { + verifyCMD("set hive.cli.print.current.db=true;use invalidDB;", + "hive (invalidDB)>", os, null, ERRNO_OK, false); + } + + @Test + public void testNoErrorDB() { + verifyCMD(null, "Error: Method not supported (state=,code=0)", errS, new String[] { "-e", "show tables;" }, + ERRNO_OK, false); + } + + private void redirectOutputStream() { + // Setup output stream to redirect output to + os = new ByteArrayOutputStream(); + ps = new PrintStream(os); + errS = new ByteArrayOutputStream(); + errPs = new PrintStream(errS); + System.setOut(ps); + System.setErr(errPs); + } + + private void initFromFile() { + tmp = generateTmpFile(CMD); + if (tmp == null) { + Assert.fail("Fail to create the initial file"); + } + executeCMD(new String[] { "-f", "\"" + tmp.getAbsolutePath() + "\"" }, null, 0); + } + + private File generateTmpFile(String context) { + File file = null; + BufferedWriter bw = null; + try { + file = 
File.createTempFile("test", ".sql"); + bw = new BufferedWriter(new FileWriter(file)); + bw.write(context); + } catch (IOException e) { + LOG.error("Failed to write tmp file due to the exception: " + e); + } finally { + IOUtils.closeQuietly(bw); + } + return file; + } + + @Before + public void setup() { + cli = new HiveCli(); + initFromFile(); + redirectOutputStream(); + } + + @After + public void tearDown() { + tmp.delete(); + } +} diff --git a/beeline/src/test/resources/hive-site.xml b/beeline/src/test/resources/hive-site.xml new file mode 100644 index 0000000..d2df03c --- /dev/null +++ b/beeline/src/test/resources/hive-site.xml @@ -0,0 +1,37 @@ + + + + + + + hive.in.test + true + Internal marker for test. Used for masking env-dependent values + + + javax.jdo.option.ConnectionURL + jdbc:derby:;databaseName=${test.tmp.dir}/metastore_db;create=true + JDBC connect string for a JDBC metastore + + + + hive.metastore.warehouse.dir + ${test.tmp.dir}/warehouse + + + diff --git a/bin/ext/cli.sh b/bin/ext/cli.sh index 914aae3..96a69b0 100644 --- a/bin/ext/cli.sh +++ b/bin/ext/cli.sh @@ -16,13 +16,23 @@ THISSERVICE=cli export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} " +updateCli() { + if [ "$USE_DEPRECATED_CLI" == "true" ]; then + CLASS=org.apache.hadoop.hive.cli.CliDriver + JAR=hive-cli-*.jar + else + export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties" + CLASS=org.apache.hive.beeline.cli.HiveCli + JAR=hive-beeline-*.jar + fi +} + cli () { - CLASS=org.apache.hadoop.hive.cli.CliDriver - execHiveCmd $CLASS "$@" + updateCli + execHiveCmd $CLASS $JAR "$@" } cli_help () { - CLASS=org.apache.hadoop.hive.cli.CliDriver - execHiveCmd $CLASS "--help" -} - + updateCli + execHiveCmd $CLASS $JAR "--help" +} diff --git a/bin/ext/util/execHiveCmd.sh b/bin/ext/util/execHiveCmd.sh index 167cc40..e46ec3c 100644 --- a/bin/ext/util/execHiveCmd.sh +++ b/bin/ext/util/execHiveCmd.sh @@ -16,9 +16,11 @@ execHiveCmd () { CLASS=$1; shift; + JAR=$1 + shift; 
# cli specific code - if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then + if [ ! -f ${HIVE_LIB}/$JAR ]; then echo "Missing Hive CLI Jar" exit 3; fi @@ -28,5 +30,5 @@ execHiveCmd () { fi # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf - exec $HADOOP jar ${HIVE_LIB}/hive-cli-*.jar $CLASS $HIVE_OPTS "$@" + exec $HADOOP jar ${HIVE_LIB}/$JAR $CLASS $HIVE_OPTS "$@" } diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index d62fd5c..4b52578 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -61,7 +61,9 @@ import org.apache.hadoop.hive.common.io.CachingPrintStream; import org.apache.hadoop.hive.common.io.FetchConverter; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.Validator; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.Driver; @@ -69,7 +71,6 @@ import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; import org.apache.hadoop.hive.ql.exec.tez.TezJobExecHelper; import org.apache.hadoop.hive.ql.parse.HiveParser; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; @@ -127,7 +128,12 @@ public int processCmd(String cmd) { } else if (tokens[0].equalsIgnoreCase("source")) { String cmd_1 = getFirstCmd(cmd_trimmed, tokens[0].length()); - cmd_1 = new VariableSubstitution().substitute(ss.getConf(), cmd_1); + cmd_1 = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return 
SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), cmd_1); File sourceFile = new File(cmd_1); if (! sourceFile.isFile()){ @@ -145,7 +151,12 @@ public int processCmd(String cmd) { } else if (cmd_trimmed.startsWith("!")) { String shell_cmd = cmd_trimmed.substring(1); - shell_cmd = new VariableSubstitution().substitute(ss.getConf(), shell_cmd); + shell_cmd = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), shell_cmd); // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'"; try { @@ -671,7 +682,12 @@ public int run(String[] args) throws Exception { // read prompt configuration and substitute variables. prompt = conf.getVar(HiveConf.ConfVars.CLIPROMPT); - prompt = new VariableSubstitution().substitute(conf, prompt); + prompt = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(conf, prompt); prompt2 = spacesForString(prompt); SessionState.start(ss); diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveVariableSource.java b/common/src/java/org/apache/hadoop/hive/conf/HiveVariableSource.java new file mode 100644 index 0000000..86f7a9c --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveVariableSource.java @@ -0,0 +1,24 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.conf; + +import java.util.Map; + +public interface HiveVariableSource { + Map getHiveVariable(); +} diff --git a/common/src/java/org/apache/hadoop/hive/conf/VariableSubstitution.java b/common/src/java/org/apache/hadoop/hive/conf/VariableSubstitution.java new file mode 100644 index 0000000..e1f53ba --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/conf/VariableSubstitution.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.conf; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; + +import java.util.Map; + +public class VariableSubstitution extends SystemVariables { + private static final Log l4j = LogFactory.getLog(VariableSubstitution.class); + + private HiveVariableSource hiveVariableSource; + + public VariableSubstitution(HiveVariableSource hiveVariableSource) { + this.hiveVariableSource = hiveVariableSource; + } + + /** + * The super method handles substitution of system variables, + * hive conf variables and env variables. This method additionally retrieves hive + * variables from the hiveVariableSource.
+ * + * @param conf + * @param var + * @return + */ + @Override + protected String getSubstitute(Configuration conf, String var) { + String val = super.getSubstitute(conf, var); + if (val == null && hiveVariableSource != null) { + Map vars = hiveVariableSource.getHiveVariable(); + if (var.startsWith(HIVEVAR_PREFIX)) { + val = vars.get(var.substring(HIVEVAR_PREFIX.length())); + } else { + val = vars.get(var); + } + } + return val; + } + + public String substitute(HiveConf conf, String expr) { + if (expr == null) { + return expr; + } + if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEVARIABLESUBSTITUTE)) { + l4j.debug("Substitution is on: " + expr); + } else { + return expr; + } + int depth = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVEVARIABLESUBSTITUTEDEPTH); + return substitute(conf, expr, depth); + } +} diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java b/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java new file mode 100644 index 0000000..faa07a7 --- /dev/null +++ b/common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.conf; + +import junit.framework.Assert; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +public class TestVariableSubstitution { + private static class LocalMySource { + final Map map = new HashMap<>(); + + public void put(String k, String v) { + map.put(k, v); + } + + public String get(String k) { + return map.get(k); + } + } + + private static LocalMySource getMySource() { + return localSource.get(); + } + + private static ThreadLocal localSource = new ThreadLocal() { + @Override protected LocalMySource initialValue() { + return new LocalMySource(); + } + }; + + @Test public void testVariableSource() throws InterruptedException { + final VariableSubstitution variableSubstitution = + new VariableSubstitution(new HiveVariableSource() { + @Override public Map getHiveVariable() { + return TestVariableSubstitution.getMySource().map; + } + }); + + String v = variableSubstitution.substitute(new HiveConf(), "${a}"); + Assert.assertEquals("${a}", v); + TestVariableSubstitution.getMySource().put("a", "b"); + v = variableSubstitution.substitute(new HiveConf(), "${a}"); + Assert.assertEquals("b", v); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 4030075..9ee6023 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -41,6 +41,8 @@ import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import 
org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -97,7 +99,6 @@ import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.OperatorDesc; @@ -389,8 +390,13 @@ public int compile(String command, boolean resetTaskIds) { try { // Initialize the transaction manager. This must be done before analyze is called. SessionState.get().initTxnMgr(conf); - - command = new VariableSubstitution().substitute(conf, command); + + command = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(conf, command); ctx = new Context(conf); ctx.setTryCount(getTryCount()); ctx.setCmd(command); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index 8b7a2e8..582ff32 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -28,12 +28,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import 
org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; @@ -216,10 +217,10 @@ private String genPartValueString (String partKey, String partVal) throws Semant //for other usually not used types, just quote the value returnVal = "'" + partVal + "'"; } - + return returnVal; } - + private String getColTypeOf (String partKey) throws SemanticException{ for (FieldSchema fs : tbl.getPartitionKeys()) { @@ -333,7 +334,12 @@ private String genRewrittenQuery(List colNames, int numBitVectors, Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(conf, rewrittenQuery); return rewrittenQuery; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java deleted file mode 100644 index e8b1d96..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.parse; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.conf.SystemVariables; - -import java.util.Map; - -public class VariableSubstitution extends SystemVariables { - - private static final Log l4j = LogFactory.getLog(VariableSubstitution.class); - - @Override - protected String getSubstitute(Configuration conf, String var) { - String val = super.getSubstitute(conf, var); - if (val == null && SessionState.get() != null) { - Map vars = SessionState.get().getHiveVariables(); - if (var.startsWith(HIVEVAR_PREFIX)) { - val = vars.get(var.substring(HIVEVAR_PREFIX.length())); - } else { - val = vars.get(var); - } - } - return val; - } - - public String substitute(HiveConf conf, String expr) { - if (expr == null) { - return expr; - } - if (HiveConf.getBoolVar(conf, ConfVars.HIVEVARIABLESUBSTITUTE)) { - l4j.debug("Substitution is on: " + expr); - } else { - return expr; - } - int depth = HiveConf.getIntVar(conf, ConfVars.HIVEVARIABLESUBSTITUTEDEPTH); - return substitute(conf, expr, depth); - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java index 0558c53..d2ac993 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java @@ -19,11 +19,13 @@ package org.apache.hadoop.hive.ql.processors; import java.util.Arrays; +import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -45,7 +47,12 @@ public void init() { @Override public CommandProcessorResponse run(String command) { SessionState ss = SessionState.get(); - command = new VariableSubstitution().substitute(ss.getConf(),command); + command = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(),command); String[] tokens = command.split("\\s+"); SessionState.ResourceType t; if (tokens.length < 2 diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java index 25ce168..7b79f64 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.nio.charset.Charset; import java.util.Arrays; +import java.util.Map; import java.util.StringTokenizer; import java.util.concurrent.atomic.AtomicInteger; @@ -32,9 +33,10 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import 
org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -142,7 +144,12 @@ public CommandProcessorResponse run(String command) throws CommandNeedRetryExcep @VisibleForTesting void parse(SessionState ss) throws CompileProcessorException { if (ss != null){ - command = new VariableSubstitution().substitute(ss.getConf(), command); + command = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), command); } if (command == null || command.length() == 0) { throw new CompileProcessorException("Command was empty"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java index 9052c82..736fa9c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java @@ -19,11 +19,13 @@ package org.apache.hadoop.hive.ql.processors; import java.util.Arrays; +import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -44,7 +46,12 @@ public void init() { @Override public CommandProcessorResponse run(String command) { SessionState ss = SessionState.get(); - command = new VariableSubstitution().substitute(ss.getConf(),command); + command = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map 
getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), command); String[] tokens = command.split("\\s+"); SessionState.ResourceType t; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java index cc0414d..c3d5f81 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java @@ -20,14 +20,16 @@ import java.io.PrintStream; import java.util.Arrays; +import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsShell; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -65,7 +67,12 @@ public CommandProcessorResponse run(String command) { try { SessionState ss = SessionState.get(); - command = new VariableSubstitution().substitute(ss.getConf(),command); + command = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), command); String[] tokens = command.split("\\s+"); CommandProcessorResponse authErrResp = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java index 2414e12..06a83ae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java @@ -30,10 +30,11 @@ import java.util.TreeMap; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.session.SessionState; /** @@ -121,17 +122,33 @@ public static int setVariable(String varname, String varvalue) throws Exception return 1; } else if (varname.startsWith(SYSTEM_PREFIX)){ String propName = varname.substring(SYSTEM_PREFIX.length()); - System.getProperties().setProperty(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue)); + System.getProperties() + .setProperty(propName, new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), varvalue)); } else if (varname.startsWith(HIVECONF_PREFIX)){ String propName = varname.substring(HIVECONF_PREFIX.length()); setConf(varname, propName, varvalue, false); } else if (varname.startsWith(HIVEVAR_PREFIX)) { String propName = varname.substring(HIVEVAR_PREFIX.length()); - ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue)); + ss.getHiveVariables().put(propName, new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), varvalue)); } else if (varname.startsWith(METACONF_PREFIX)) { String propName = varname.substring(METACONF_PREFIX.length()); Hive hive = Hive.get(ss.getConf()); - hive.setMetaConf(propName, new VariableSubstitution().substitute(ss.getConf(), varvalue)); + hive.setMetaConf(propName, new 
VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(ss.getConf(), varvalue)); } else { setConf(varname, varname, varvalue, true); if (varname.equals(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED.toString())) { @@ -145,7 +162,12 @@ public static int setVariable(String varname, String varvalue) throws Exception private static void setConf(String varname, String key, String varvalue, boolean register) throws IllegalArgumentException { HiveConf conf = SessionState.get().getConf(); - String value = new VariableSubstitution().substitute(conf, varvalue); + String value = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(conf, varvalue); if (conf.getBoolVar(HiveConf.ConfVars.HIVECONFVALIDATION)) { HiveConf.ConfVars confVars = HiveConf.getConfVars(key); if (confVars != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 7ed8e5f..d28a454 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -161,6 +161,11 @@ protected File tmpOutputFile; /** + * Temporary file name used to store error output of executing non-Hive commands (e.g., set, dfs) + */ + protected File tmpErrOutputFile; + + /** * type of the command. 
*/ private HiveOperation commandType; @@ -285,6 +290,14 @@ public void setTmpOutputFile(File f) { tmpOutputFile = f; } + public File getTmpErrOutputFile() { + return tmpErrOutputFile; + } + + public void setTmpErrOutputFile(File tmpErrOutputFile) { + this.tmpErrOutputFile = tmpErrOutputFile; + } + public boolean getIsSilent() { if(conf != null) { return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT); @@ -493,6 +506,14 @@ public static SessionState start(SessionState startSs) { } } + // Set temp file containing error output to be sent to client + if (startSs.getTmpErrOutputFile() == null) { + try { + startSs.setTmpErrOutputFile(createTempFile(startSs.getConf())); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } catch (Exception e) { // Catch-all due to some exec time dependencies on session state // that would cause ClassNoFoundException otherwise diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index bcc66cf..1d1e995 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -66,13 +66,14 @@ protected HiveCommandOperation(HiveSession parentSession, String statement, private void setupSessionIO(SessionState sessionState) { try { - LOG.info("Putting temp output to file " + sessionState.getTmpOutputFile().toString()); + LOG.info("Putting temp output to file " + sessionState.getTmpOutputFile().toString() + + " and error output to file " + sessionState.getTmpErrOutputFile().toString()); sessionState.in = null; // hive server's session input stream is not used - // open a per-session file in auto-flush mode for writing temp results - sessionState.out = new PrintStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, "UTF-8"); - // TODO: for hadoop jobs, progress is printed out to 
session.err, - // we should find a way to feed back job progress to client - sessionState.err = new PrintStream(System.err, true, "UTF-8"); + // open a per-session file in auto-flush mode for writing temp results and tmp error output + sessionState.out = + new PrintStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, "UTF-8"); + sessionState.err = + new PrintStream(new FileOutputStream(sessionState.getTmpErrOutputFile()), true, "UTF-8"); } catch (IOException e) { LOG.error("Error in creating temp output file ", e); try { @@ -90,8 +91,7 @@ private void setupSessionIO(SessionState sessionState) { private void tearDownSessionIO() { - IOUtils.cleanup(LOG, parentSession.getSessionState().out); - IOUtils.cleanup(LOG, parentSession.getSessionState().err); + IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); } @Override @@ -202,6 +202,8 @@ private void cleanTmpFile() { SessionState sessionState = getParentSession().getSessionState(); File tmp = sessionState.getTmpOutputFile(); tmp.delete(); + tmp = sessionState.getTmpErrOutputFile(); + tmp.delete(); } private void resetResultReader() { diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index cc9df76..175348b 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -32,6 +32,8 @@ import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveVariableSource; +import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hadoop.hive.ql.CommandNeedRetryException; @@ -39,7 +41,6 @@ import org.apache.hadoop.hive.ql.exec.ExplainTask; import 
org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.OperationLog; import org.apache.hadoop.hive.ql.session.SessionState; @@ -105,7 +106,12 @@ public void prepare(HiveConf sqlOperationConf) throws HiveSQLException { // For now, we disable the test attempts. driver.setTryCount(Integer.MAX_VALUE); - String subStatement = new VariableSubstitution().substitute(sqlOperationConf, statement); + String subStatement = new VariableSubstitution(new HiveVariableSource() { + @Override + public Map getHiveVariable() { + return SessionState.get().getHiveVariables(); + } + }).substitute(sqlOperationConf, statement); response = driver.compileAndRespond(subStatement); if (0 != response.getResponseCode()) { throw toSQLException("Error while compiling statement", response); @@ -290,6 +296,10 @@ private void cleanup(OperationState state) throws HiveSQLException { if (ss.getTmpOutputFile() != null) { ss.getTmpOutputFile().delete(); } + + if (ss.getTmpErrOutputFile() != null) { + ss.getTmpErrOutputFile().delete(); + } } @Override