diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 86db406..177ee0e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -102,6 +102,8 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
+import org.apache.hadoop.hive.ql.security.authorization.DelegatableAuthorizationProvider;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.ByteStream;
@@ -486,11 +488,13 @@ public int compile(String command, boolean resetTaskIds) {
       schema = getSchema(sem, conf);
 
       //do the authorization check
-      if (HiveConf.getBoolVar(conf,
-          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+      if (!sem.skipAuthorization() &&
+          HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+
         try {
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
-          doAuthorization(sem);
+          doAuthorization(sem,
+              new AuthorizationFactory.DefaultAuthorizationExceptionHandler());
         } catch (AuthorizationException authExp) {
           errorMessage = "Authorization failed:" + authExp.getMessage()
               + ". Use show grant to get more details.";
@@ -501,8 +505,6 @@ public int compile(String command, boolean resetTaskIds) {
         }
       }
 
-      //restore state after we're done executing a specific query
-
       return 0;
     } catch (Exception e) {
       ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
@@ -525,28 +527,30 @@ public int compile(String command, boolean resetTaskIds) {
       return error.getErrorCode();
     } finally {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE);
-      restoreSession(queryState);
     }
   }
 
-  private void doAuthorization(BaseSemanticAnalyzer sem)
-      throws HiveException, AuthorizationException {
+  public static void doAuthorization(BaseSemanticAnalyzer sem,
+      AuthorizationFactory.AuthorizationExceptionHandler handler)
+      throws HiveException, AuthorizationException {
     HashSet<ReadEntity> inputs = sem.getInputs();
     HashSet<WriteEntity> outputs = sem.getOutputs();
     SessionState ss = SessionState.get();
+    DelegatableAuthorizationProvider authorizer =
+        AuthorizationFactory.create(ss.getAuthorizer(), handler);
     HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
     if (op != null) {
       if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
           || op.equals(HiveOperation.CREATETABLE)) {
-        ss.getAuthorizer().authorize(
+        authorizer.authorize(
             db.getDatabase(SessionState.get().getCurrentDatabase()), null,
             HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
       } else {
         if (op.equals(HiveOperation.IMPORT)) {
           ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
           if (!isa.existsTable()) {
-            ss.getAuthorizer().authorize(
+            authorizer.authorize(
                 db.getDatabase(SessionState.get().getCurrentDatabase()), null,
                 HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
           }
@@ -559,14 +563,14 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
           Partition part = db.getPartition(write.getTable(), write
               .getPartition().getSpec(), false);
           if (part != null) {
-            ss.getAuthorizer().authorize(write.getPartition(), null,
+            authorizer.authorize(write.getPartition(), null,
                 op.getOutputRequiredPrivileges());
             continue;
           }
         }
 
         if (write.getTable() != null) {
-          ss.getAuthorizer().authorize(write.getTable(), null,
+          authorizer.authorize(write.getTable(), null,
              op.getOutputRequiredPrivileges());
        }
      }
@@ -658,11 +662,11 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
         if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
           List<String> cols = part2Cols.get(partition);
           if (cols != null && cols.size() > 0) {
-            ss.getAuthorizer().authorize(partition.getTable(),
+            authorizer.authorize(partition.getTable(),
                 partition, cols, op.getInputRequiredPrivileges(), null);
           } else {
-            ss.getAuthorizer().authorize(partition,
+            authorizer.authorize(partition,
                 op.getInputRequiredPrivileges(), null);
           }
           continue;
@@ -676,10 +680,10 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
             !(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
           List<String> cols = tab2Cols.get(tbl);
           if (cols != null && cols.size() > 0) {
-            ss.getAuthorizer().authorize(tbl, null, cols,
+            authorizer.authorize(tbl, null, cols,
                 op.getInputRequiredPrivileges(), null);
           } else {
-            ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(),
+            authorizer.authorize(tbl, op.getInputRequiredPrivileges(),
                 null);
           }
           tableAuthChecked.add(tbl.getTableName());
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index f0da57d..02bff53 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -28,6 +28,7 @@
 import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -39,14 +40,20 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.json.JSONArray;
@@ -188,6 +195,41 @@ public JSONObject getJSONPlan(PrintStream out, ExplainWork work)
     return jsonOutput ? outJSONObject : null;
   }
 
+  private List<String> toString(Collection<?> objects) {
+    List<String> list = new ArrayList<String>();
+    for (Object object : objects) {
+      list.add(String.valueOf(object));
+    }
+    return list;
+  }
+
+  private Object toJson(String header, String message, PrintStream out, ExplainWork work)
+      throws Exception {
+    if (work.isFormatted()) {
+      return message;
+    }
+    out.print(header);
+    out.println(": ");
+    out.print(indentString(2));
+    out.println(message);
+    return null;
+  }
+
+  private Object toJson(String header, List<String> messages, PrintStream out, ExplainWork work)
+      throws Exception {
+    if (work.isFormatted()) {
+      return new JSONArray(messages);
+    }
+    out.print(header);
+    out.println(": ");
+    for (String message : messages) {
+      out.print(indentString(2));
+      out.print(message);
+      out.println();
+    }
+    return null;
+  }
+
   @Override
   public int execute(DriverContext driverContext) {
@@ -202,6 +244,15 @@ public int execute(DriverContext driverContext) {
         if (work.isFormatted()) {
           out.print(jsonLogicalPlan);
         }
+      }
+      if (work.isAuthorize()) {
+        JSONObject jsonAuth = getJsonAuth(out, work);
+        if (work.isFormatted()) {
+          out.print(jsonAuth);
+        }
+      } else if (work.getDependency()) {
+        JSONObject jsonDependencies = getJSONDependencies(work);
+        out.print(jsonDependencies);
       } else {
         if (work.getDependency()) {
           JSONObject jsonDependencies = getJSONDependencies(work);
@@ -228,7 +279,49 @@ public int execute(DriverContext driverContext) {
       }
     }
 
-  private String indentString(int indent) {
+  private JSONObject getJsonAuth(PrintStream out, ExplainWork work) throws Exception {
+
+    BaseSemanticAnalyzer analyzer = work.getAnalyzer();
+    HiveOperation operation = analyzer.getHiveOperation();
+
+    JSONObject object = new JSONObject();
+    Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
+    if (work.isFormatted()) {
+      object.put("INPUTS", jsonInput);
+    }
+    Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
+    if (work.isFormatted()) {
+      object.put("OUTPUTS", jsonOutput);
+    }
+    String userName = SessionState.get().getAuthenticator().getUserName();
+    Object jsonUser = toJson("CURRENT_USER", userName, out, work);
+    if (work.isFormatted()) {
+      object.put("CURRENT_USER", jsonUser);
+    }
+    Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
+    if (work.isFormatted()) {
+      object.put("OPERATION", jsonOperation);
+    }
+    if (analyzer.skipAuthorization()) {
+      return object;
+    }
+    final List<String> exceptions = new ArrayList<String>();
+    Driver.doAuthorization(analyzer,
+        new AuthorizationFactory.AuthorizationExceptionHandler() {
+          public void exception(AuthorizationException exception) {
+            exceptions.add(exception.getMessage());
+          }
+        });
+    if (!exceptions.isEmpty()) {
+      Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
+      if (work.isFormatted()) {
+        object.put("AUTHORIZATION_FAILURES", jsonFails);
+      }
+    }
+    return object;
+  }
+
+  private static String indentString(int indent) {
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < indent; ++i) {
       sb.append(" ");
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index da80d81..cb79068 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -65,6 +65,7 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -77,7 +78,6 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -102,6 +102,8 @@
   public static int HIVE_COLUMN_ORDER_ASC = 1;
   public static int HIVE_COLUMN_ORDER_DESC = 0;
 
+  protected HiveOperation hiveOperation;
+
   /**
    * ReadEntitites that are passed to the hooks.
    */
@@ -136,6 +138,18 @@
   protected static final String ORCFILE_SERDE = OrcSerde.class
       .getName();
 
+  public HiveOperation getHiveOperation() {
+    return hiveOperation;
+  }
+
+  public void setHiveOperation(HiveOperation hiveOperation) {
+    this.hiveOperation = hiveOperation;
+  }
+
+  public boolean skipAuthorization() {
+    return false;
+  }
+
   class RowFormatParams {
     String fieldDelim = null;
     String fieldEscape = null;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 9b1c36e..3ffd9f1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -48,12 +49,20 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     boolean formatted = false;
     boolean dependency = false;
     boolean logical = false;
-    if (ast.getChildCount() == 2) {
-      int explainOptions = ast.getChild(1).getType();
-      formatted = (explainOptions == HiveParser.KW_FORMATTED);
-      extended = (explainOptions == HiveParser.KW_EXTENDED);
-      dependency = (explainOptions == HiveParser.KW_DEPENDENCY);
-      logical = (explainOptions == HiveParser.KW_LOGICAL);
+    boolean authorize = false;
+    for (int i = 1; i < ast.getChildCount(); i++) {
+      int explainOptions = ast.getChild(i).getType();
+      if (explainOptions == HiveParser.KW_FORMATTED) {
+        formatted = true;
+      } else if (explainOptions == HiveParser.KW_EXTENDED) {
+        extended = true;
+      } else if (explainOptions == HiveParser.KW_DEPENDENCY) {
+        dependency = true;
+      } else if (explainOptions == HiveParser.KW_LOGICAL) {
+        logical = true;
+      } else if (explainOptions == HiveParser.KW_AUTHORIZE) {
+        authorize = true;
+      }
     }
 
     ctx.setExplain(true);
@@ -82,11 +91,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         tasks,
         fetchTask,
         input.toStringTree(),
-        sem.getInputs(),
+        sem,
         extended,
         formatted,
         dependency,
-        logical);
+        logical,
+        authorize);
 
     work.setAppendTaskType(
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));
@@ -101,4 +111,12 @@ public List<FieldSchema> getResultSchema() {
     return fieldList;
   }
+
+  @Override
+  public boolean skipAuthorization() {
+    List<Task<? extends Serializable>> rootTasks = getRootTasks();
+    assert rootTasks != null && rootTasks.size() == 1;
+    Task<? extends Serializable> task = rootTasks.get(0);
+    return task instanceof ExplainTask && ((ExplainTask) task).getWork().isAuthorize();
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 366b714..25e5f77 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -263,6 +263,7 @@ KW_USER: 'USER';
 KW_ROLE: 'ROLE';
 KW_INNER: 'INNER';
 KW_EXCHANGE: 'EXCHANGE';
+KW_AUTHORIZE: 'AUTHORIZE';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 5e5b8cf..8c13d03 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -565,10 +565,16 @@ statement
 explainStatement
 @init { msgs.push("explain statement"); }
 @after { msgs.pop(); }
-    : KW_EXPLAIN (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY|explainOptions=KW_LOGICAL)? execStatement
-      -> ^(TOK_EXPLAIN execStatement $explainOptions?)
+    : KW_EXPLAIN explainOption* execStatement
+      -> ^(TOK_EXPLAIN execStatement explainOption*)
     ;
 
+explainOption
+@init { msgs.push("explain option"); }
+@after { msgs.pop(); }
+    : KW_EXTENDED|KW_FORMATTED|KW_DEPENDENCY|KW_LOGICAL|KW_AUTHORIZE
+    ;
+
 execStatement
 @init { msgs.push("statement"); }
 @after { msgs.pop(); }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 97454e4..fa3f325 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -135,10 +135,30 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       throws SemanticException {
+    BaseSemanticAnalyzer analyzer = getAnalyzer(conf, tree);
+
+    HiveOperation operation;
+    if (tree.getToken().getType() == HiveParser.TOK_ALTERTABLE_PARTITION) {
+      Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
+      if (tree.getChild(0).getChildCount() > 1) {
+        operation = tablePartitionCommandType.get(type)[1];
+      } else {
+        operation = tablePartitionCommandType.get(type)[0];
+      }
+    } else {
+      operation = commandType.get(tree.getToken().getType());
+    }
+    analyzer.setHiveOperation(operation);
+    setSessionCommandType(operation);
+
+    return analyzer;
+  }
+
+  private static BaseSemanticAnalyzer getAnalyzer(HiveConf conf, ASTNode tree)
+      throws SemanticException {
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
     } else {
-      setSessionCommandType(commandType.get(tree.getToken().getType()));
       switch (tree.getToken().getType()) {
       case HiveParser.TOK_EXPLAIN:
@@ -208,14 +228,6 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       case HiveParser.TOK_EXCHANGEPARTITION:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
-        HiveOperation commandType = null;
-        Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
-        if (tree.getChild(0).getChildCount() > 1) {
-          commandType = tablePartitionCommandType.get(type)[1];
-        } else {
-          commandType = tablePartitionCommandType.get(type)[0];
-        }
-        setSessionCommandType(commandType);
         return new DDLSemanticAnalyzer(conf);
 
       case HiveParser.TOK_CREATEFUNCTION:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
index 0cb6a9b..cba3bec 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 
 /**
  * ExplainWork.
  */
@@ -48,6 +49,9 @@
 
   boolean appendTaskType;
 
+  boolean authorize;
+
+  private transient BaseSemanticAnalyzer analyzer;
 
   public ExplainWork() {
   }
@@ -57,21 +61,24 @@ public ExplainWork(String resFile,
       List<Task<? extends Serializable>> rootTasks,
       Task<? extends Serializable> fetchTask,
       String astStringTree,
-      HashSet<ReadEntity> inputs,
+      BaseSemanticAnalyzer analyzer,
       boolean extended,
       boolean formatted,
       boolean dependency,
-      boolean logical) {
+      boolean logical,
+      boolean authorize) {
     this.resFile = resFile;
     this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
     this.fetchTask = fetchTask;
     this.astStringTree = astStringTree;
-    this.inputs = inputs;
+    this.analyzer = analyzer;
+    this.inputs = analyzer.getInputs();
     this.extended = extended;
     this.formatted = formatted;
     this.dependency = dependency;
     this.logical = logical;
     this.pCtx = pCtx;
+    this.authorize = authorize;
   }
 
   public String getResFile() {
@@ -161,4 +168,16 @@ public boolean isAppendTaskType() {
   public void setAppendTaskType(boolean appendTaskType) {
     this.appendTaskType = appendTaskType;
   }
+
+  public boolean isAuthorize() {
+    return authorize;
+  }
+
+  public void setAuthorize(boolean authorize) {
+    this.authorize = authorize;
+  }
+
+  public BaseSemanticAnalyzer getAnalyzer() {
+    return analyzer;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
new file mode 100644
index 0000000..3f1a4df
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public class AuthorizationFactory {
+
+  public static DelegatableAuthorizationProvider create(HiveAuthorizationProvider delegated) {
+    return create(delegated, new DefaultAuthorizationExceptionHandler());
+  }
+
+  public static DelegatableAuthorizationProvider create(final HiveAuthorizationProvider delegated,
+      final AuthorizationExceptionHandler handler) {
+
+    final String user = delegated.getAuthenticator().getUserName();
+    final Set<String> owners = new HashSet<String>();
+
+    InvocationHandler invocation = new InvocationHandler() {
+      public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+        if (method.getName().equals(DelegatableAuthorizationProvider.AUTH_FOR)) {
+          getOwnersFrom((ReadEntity) args[0], owners, user);
+          return proxy;
+        }
+        if (owners.isEmpty()) {
+          invokeAuth(method, args);
+          return null;
+        }
+        HiveAuthenticationProvider prev = delegated.getAuthenticator();
+        try {
+          for (String owner : owners) {
+            delegated.setAuthenticator(new OwnerAuthenticationProvider(owner));
+            invokeAuth(method, args);
+          }
+        } finally {
+          owners.clear();
+          delegated.setAuthenticator(prev);
+        }
+        return null;
+      }
+
+      private void invokeAuth(Method method, Object[] args) throws Throwable {
+        try {
+          method.invoke(delegated, args);
+        } catch (InvocationTargetException e) {
+          if (e.getTargetException() instanceof AuthorizationException) {
+            handler.exception((AuthorizationException) e.getTargetException());
+          }
+        }
+      }
+
+      private void getOwnersFrom(ReadEntity readEntity, Set<String> owners, String user) {
+        owners.clear();
+        Set<ReadEntity> parents = readEntity.getParents();
+        if (parents == null || parents.isEmpty()) {
+          return;
+        }
+        for (ReadEntity parent : parents) {
+          if (parent.getType() == Entity.Type.TABLE) {
+            owners.add(parent.getT().getTTable().getOwner());
+          } else if (parent.getType() == Entity.Type.PARTITION) {
+            owners.add(parent.getP().getTable().getTTable().getOwner());
+          }
+        }
+        owners.remove(user);
+      }
+    };
+
+    return (DelegatableAuthorizationProvider) Proxy.newProxyInstance(
+        AuthorizationFactory.class.getClassLoader(),
+        new Class[] {DelegatableAuthorizationProvider.class},
+        invocation);
+  }
+
+  public static class OwnerAuthenticationProvider implements HiveAuthenticationProvider {
+
+    private final String userName;
+
+    private OwnerAuthenticationProvider(String userName) {
+      this.userName = userName;
+    }
+
+    public String getUserName() {
+      return userName;
+    }
+
+    public List<String> getGroupNames() {
+      return null;
+    }
+
+    public void destroy() throws HiveException {
+    }
+
+    public void setConf(Configuration conf) {
+    }
+
+    public Configuration getConf() {
+      return null;
+    }
+  }
+
+  public static interface AuthorizationExceptionHandler {
+    void exception(AuthorizationException exception) throws AuthorizationException;
+  }
+
+  public static class DefaultAuthorizationExceptionHandler
+      implements AuthorizationExceptionHandler {
+    public void exception(AuthorizationException exception) {
+      throw exception;
+    }
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DelegatableAuthorizationProvider.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DelegatableAuthorizationProvider.java
new file mode 100644
index 0000000..1cf9a49
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DelegatableAuthorizationProvider.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+
+public interface DelegatableAuthorizationProvider extends HiveAuthorizationProvider {
+
+  String AUTH_FOR = "authorizeFor";
+
+  HiveAuthorizationProvider authorizeFor(ReadEntity readEntity);
+}
diff --git ql/src/test/queries/clientpositive/authorization_explain.q ql/src/test/queries/clientpositive/authorization_explain.q
new file mode 100644
index 0000000..e3df2f6
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_explain.q
@@ -0,0 +1,7 @@
+set hive.security.authorization.enabled=true;
+
+explain authorize select * from src join srcpart;
+explain formatted authorize select * from src join srcpart;
+
+explain authorize use default;
+explain formatted authorize use default;
diff --git ql/src/test/results/clientpositive/authorization_explain.q.out ql/src/test/results/clientpositive/authorization_explain.q.out
new file mode 100644
index 0000000..edf27c7
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_explain.q.out
@@ -0,0 +1,42 @@
+PREHOOK: query: explain authorize select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain authorize select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS: 
+  default@srcpart
+  default@srcpart@ds=2008-04-08/hr=11
+  default@srcpart@ds=2008-04-08/hr=12
+  default@srcpart@ds=2008-04-09/hr=11
+  default@srcpart@ds=2008-04-09/hr=12
+  default@src
+OUTPUTS: 
+#### A masked pattern was here ####
+CURRENT_USER: 
+  hive_test_user
+OPERATION: 
+  QUERY
+AUTHORIZATION_FAILURES: 
+  No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
+  No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+  No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+PREHOOK: query: explain formatted authorize select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorize select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorize use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorize use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS: 
+OUTPUTS: 
+CURRENT_USER: 
+  hive_test_user
+OPERATION: 
+  SWITCHDATABASE
+  SWITCHDATABASE
+PREHOOK: query: explain formatted authorize use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorize use default
+POSTHOOK: type: SWITCHDATABASE
+{"OUTPUTS":[],"INPUTS":[],"OPERATION":"SWITCHDATABASE","CURRENT_USER":"hive_test_user"}
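
Reviewer note (not part of the patch): the collecting-handler API that EXPLAIN AUTHORIZE relies on can also be driven directly, which may help when reviewing the Driver/AuthorizationFactory changes above. The sketch below mirrors ExplainTask.getJsonAuth(); the `sem` argument stands for any already-compiled BaseSemanticAnalyzer and the class/method names here are illustrative only — everything else comes from the classes added in this patch.

  import java.util.ArrayList;
  import java.util.List;

  import org.apache.hadoop.hive.ql.Driver;
  import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
  import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
  import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;

  public class CollectAuthFailuresSketch {
    // Collects every authorization failure instead of aborting on the first one:
    // the handler passed to Driver.doAuthorization() records each message, whereas
    // the DefaultAuthorizationExceptionHandler used by Driver.compile() rethrows
    // and fails the query.
    public static List<String> collectFailures(BaseSemanticAnalyzer sem) throws Exception {
      final List<String> failures = new ArrayList<String>();
      Driver.doAuthorization(sem, new AuthorizationFactory.AuthorizationExceptionHandler() {
        public void exception(AuthorizationException e) {
          failures.add(e.getMessage());
        }
      });
      return failures;
    }
  }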