diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 3cdedba..17ef2db 100644
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -32,7 +32,6 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import jline.ArgumentCompletor;
 import jline.ArgumentCompletor.AbstractArgumentDelimiter;
@@ -150,24 +149,6 @@ public int processCmd(String cmd) {
             stringifyException(e));
         ret = 1;
       }
-    } else if (tokens[0].toLowerCase().equals("list")) {
-
-      SessionState.ResourceType t;
-      if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
-        console.printError("Usage: list ["
-            + StringUtils.join(SessionState.ResourceType.values(), "|") + "] [<value> [<value>]*]");
-        ret = 1;
-      } else {
-        List<String> filter = null;
-        if (tokens.length >= 3) {
-          System.arraycopy(tokens, 2, tokens, 0, tokens.length - 2);
-          filter = Arrays.asList(tokens);
-        }
-        Set<String> s = ss.list_resource(t, filter);
-        if (s != null && !s.isEmpty()) {
-          ss.out.println(StringUtils.join(s, "\n"));
-        }
-      }
     } else if (ss.isRemoteMode()) { // remote mode -- connecting to remote hive server
       HiveClient client = ss.getClient();
       PrintStream out = ss.out;
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 15bc0a3..570a383 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1546,7 +1546,7 @@
     HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", "", ""),
     HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", "", ""),
 
-    HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,delete,compile",
+    HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,compile",
        "Comma separated list of non-SQL Hive commands users are authorized to execute"),
 
     HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
index d616952..10316fb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
@@ -21,7 +21,8 @@
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 
 public interface CommandProcessor {
-  public void init();
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException;
+  void init();
+
+  CommandProcessorResponse run(String command) throws CommandNeedRetryException;
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
index 70c76b1..0d237f0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
@@ -74,6 +74,8 @@ public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
        return new DfsProcessor(ss.getConf());
      case ADD:
        return new AddResourceProcessor();
+     case LIST:
+       return new ListResourceProcessor();
      case DELETE:
        return new DeleteResourceProcessor();
      case COMPILE:
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
index ae532f6..4a6dc61 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
@@ -30,6 +30,7 @@
   RESET(),
   DFS(),
   ADD(),
+  LIST(),
   DELETE(),
   COMPILE();
   private static final Set<String> COMMANDS = new HashSet<String>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
new file mode 100644
index 0000000..e6160d4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/ListResourceProcessor.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT;
+import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
+import static org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
+
+public class ListResourceProcessor implements CommandProcessor {
+
+  private static final Log LOG = LogFactory.getLog(ListResourceProcessor.class.getName());
+  private static final SessionState.LogHelper console = new SessionState.LogHelper(LOG);
+
+  private static final String LIST_COLUMN_NAME = "resource";
+  private static final Schema SCHEMA;
+
+  static {
+    SCHEMA = new Schema();
+    SCHEMA.addToFieldSchemas(new FieldSchema(LIST_COLUMN_NAME, STRING_TYPE_NAME, null));
+    SCHEMA.putToProperties(SERIALIZATION_NULL_FORMAT, defaultNullString);
+  }
+
+  @Override
+  public void init() {
+  }
+
+  @Override
+  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+    SessionState ss = SessionState.get();
+    String[] tokens = command.split("\\s+");
+    SessionState.ResourceType t;
+    if (tokens.length < 1 || (t = ss.find_resource_type(tokens[0])) == null) {
+      String message = "Usage: list ["
+          + StringUtils.join(SessionState.ResourceType.values(), "|") + "] [<value> [<value>]*]";
+      return new CommandProcessorResponse(1, message, null);
+    }
+    List<String> filter = null;
+    if (tokens.length > 1) {
+      filter = Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length));
+    }
+    Set<String> s = ss.list_resource(t, filter);
+    if (s != null && !s.isEmpty()) {
+      ss.out.println(StringUtils.join(s, "\n"));
+    }
+    return new CommandProcessorResponse(0, null, null, SCHEMA);
+  }
+}
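
Reviewer note: below is a minimal, hypothetical sketch (not part of the patch) of how the new LIST command is dispatched after this change, assuming the standard SessionState and CommandProcessorFactory entry points touched above. The class name ListCommandExample and the choice of the FILE resource type are illustrative only.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ListCommandExample {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // ListResourceProcessor reads the thread-local SessionState, so one must be started first.
    SessionState.start(new SessionState(conf));

    // "list" resolves to HiveCommand.LIST and, since "list" is now on the default
    // hive.security.command.whitelist, the factory returns the new processor.
    String[] tokens = new String[] { "list", "FILE" };
    CommandProcessor proc = CommandProcessorFactory.getForHiveCommand(tokens, conf);

    // As in CliDriver, the processor receives the command with the leading keyword
    // stripped; ListResourceProcessor parses the resource type and optional filters itself.
    CommandProcessorResponse resp = proc.run("FILE");
    System.out.println("response code: " + resp.getResponseCode());
  }
}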