diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index d57dabb..7c6f515 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -101,6 +101,7 @@
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
@@ -456,8 +457,9 @@ public int compile(String command, boolean resetTaskIds) {
       schema = getSchema(sem, conf);
 
       //do the authorization check
-      if (HiveConf.getBoolVar(conf,
-          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+      if (!sem.skipAuthorization() &&
+          HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+
         try {
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
           doAuthorization(sem);
@@ -472,8 +474,6 @@ public int compile(String command, boolean resetTaskIds) {
         }
       }
 
-      //restore state after we're done executing a specific query
-
       return 0;
     } catch (Exception e) {
       ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
@@ -500,34 +500,34 @@ public int compile(String command, boolean resetTaskIds) {
     }
   }
 
-  private void doAuthorization(BaseSemanticAnalyzer sem)
-      throws HiveException, AuthorizationException {
+  public static void doAuthorization(BaseSemanticAnalyzer sem)
+      throws HiveException, AuthorizationException {
     HashSet<ReadEntity> inputs = sem.getInputs();
     HashSet<WriteEntity> outputs = sem.getOutputs();
     SessionState ss = SessionState.get();
-    HiveOperation op = ss.getHiveOperation();
+    HiveOperation op = sem.getHiveOperation();
     Hive db = sem.getDb();
     if (ss.isAuthorizationModeV2()) {
       doAuthorizationV2(ss, op, inputs, outputs);
       return;
     }
-
     if (op == null) {
       throw new HiveException("Operation should not be null");
     }
+    HiveAuthorizationProvider authorizer = ss.getAuthorizer();
     if (op.equals(HiveOperation.CREATEDATABASE)) {
-      ss.getAuthorizer().authorize(
+      authorizer.authorize(
           op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
     } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
         || op.equals(HiveOperation.CREATETABLE)) {
-      ss.getAuthorizer().authorize(
+      authorizer.authorize(
           db.getDatabase(SessionState.get().getCurrentDatabase()), null,
           HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
     } else {
       if (op.equals(HiveOperation.IMPORT)) {
         ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
         if (!isa.existsTable()) {
-          ss.getAuthorizer().authorize(
+          authorizer.authorize(
             db.getDatabase(SessionState.get().getCurrentDatabase()), null,
             HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
         }
@@ -539,7 +539,7 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
           continue;
         }
         if (write.getType() == Entity.Type.DATABASE) {
-          ss.getAuthorizer().authorize(write.getDatabase(),
+          authorizer.authorize(write.getDatabase(),
               null, op.getOutputRequiredPrivileges());
           continue;
         }
@@ -548,14 +548,14 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
           Partition part = db.getPartition(write.getTable(), write
              .getPartition().getSpec(), false);
           if (part != null) {
-            ss.getAuthorizer().authorize(write.getPartition(), null,
+            authorizer.authorize(write.getPartition(), null,
                 op.getOutputRequiredPrivileges());
             continue;
           }
         }
 
         if (write.getTable() != null) {
-          ss.getAuthorizer().authorize(write.getTable(), null,
+          authorizer.authorize(write.getTable(), null,
               op.getOutputRequiredPrivileges());
         }
       }
@@ -646,7 +646,7 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
         continue;
       }
       if (read.getType() == Entity.Type.DATABASE) {
-        ss.getAuthorizer().authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
+        authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
         continue;
       }
       Table tbl = read.getTable();
@@ -657,11 +657,11 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
         if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
           List<String> cols = part2Cols.get(partition);
           if (cols != null && cols.size() > 0) {
-            ss.getAuthorizer().authorize(partition.getTable(),
+            authorizer.authorize(partition.getTable(),
                 partition, cols, op.getInputRequiredPrivileges(),
                 null);
           } else {
-            ss.getAuthorizer().authorize(partition,
+            authorizer.authorize(partition,
                 op.getInputRequiredPrivileges(), null);
           }
           continue;
@@ -675,10 +675,10 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
           !(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
         List<String> cols = tab2Cols.get(tbl);
         if (cols != null && cols.size() > 0) {
-          ss.getAuthorizer().authorize(tbl, null, cols,
+          authorizer.authorize(tbl, null, cols,
              op.getInputRequiredPrivileges(), null);
         } else {
-          ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(),
+          authorizer.authorize(tbl, op.getInputRequiredPrivileges(),
              null);
         }
         tableAuthChecked.add(tbl.getTableName());
@@ -688,7 +688,7 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
     }
   }
 
-  private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
+  private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
       HashSet<WriteEntity> outputs) throws HiveException {
     HiveOperationType hiveOpType = getHiveOperationType(op);
     List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
@@ -697,7 +697,7 @@ private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
     }
   }
 
-  private List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> privObjects) {
+  private static List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> privObjects) {
     List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
     if(privObjects == null){
       return hivePrivobjs;
@@ -748,9 +748,7 @@ private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
[...]
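Two behavioral points in the Driver.java hunks are easy to miss: doAuthorization() is now public static so ExplainTask (below) can drive the same checks outside of compile(), and the analyzer is consulted before the config flag, so an EXPLAIN AUTHORIZATION statement still compiles when hive.security.authorization.enabled=true. A minimal standalone sketch of that gating order, with a stub interface standing in for BaseSemanticAnalyzer (illustrative names, not Hive APIs):

```java
// Sketch of the new gating order in Driver.compile(): the analyzer's
// skipAuthorization() short-circuits before the config flag is consulted.
public class AuthGateSketch {

  // Stand-in for BaseSemanticAnalyzer#skipAuthorization()
  interface Analyzer {
    boolean skipAuthorization();
  }

  static boolean shouldAuthorize(Analyzer sem, boolean authorizationEnabled) {
    // Mirrors: if (!sem.skipAuthorization() && HiveConf.getBoolVar(...))
    return !sem.skipAuthorization() && authorizationEnabled;
  }

  public static void main(String[] args) {
    Analyzer explainAuth = () -> true;   // EXPLAIN AUTHORIZATION opts out
    Analyzer ordinary = () -> false;     // regular statements do not

    System.out.println(shouldAuthorize(explainAuth, true));  // false: check skipped
    System.out.println(shouldAuthorize(ordinary, true));     // true: check runs
  }
}
```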
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ ... @@ public JSONObject getJSONPlan(PrintStream out, ..., List<Task<? extends Serializable>> tasks,
     return jsonOutput ? outJSONObject : null;
   }
 
+  private List<String> toString(Collection<?> objects) {
+    List<String> list = new ArrayList<String>();
+    for (Object object : objects) {
+      list.add(String.valueOf(object));
+    }
+    return list;
+  }
+
+  private Object toJson(String header, String message, PrintStream out, ExplainWork work)
+      throws Exception {
+    if (work.isFormatted()) {
+      return message;
+    }
+    out.print(header);
+    out.println(": ");
+    out.print(indentString(2));
+    out.println(message);
+    return null;
+  }
+
+  private Object toJson(String header, List<String> messages, PrintStream out, ExplainWork work)
+      throws Exception {
+    if (work.isFormatted()) {
+      return new JSONArray(messages);
+    }
+    out.print(header);
+    out.println(": ");
+    for (String message : messages) {
+      out.print(indentString(2));
+      out.print(message);
+      out.println();
+    }
+    return null;
+  }
+
   @Override
   public int execute(DriverContext driverContext) {
 
@@ -223,6 +266,14 @@ public int execute(DriverContext driverContext) {
       if (work.isFormatted()) {
         out.print(jsonLogicalPlan);
       }
+    } else if (work.isAuthorize()) {
+      JSONObject jsonAuth = collectAuthRelatedEntities(out, work);
+      if (work.isFormatted()) {
+        out.print(jsonAuth);
+      }
+    } else if (work.getDependency()) {
+      JSONObject jsonDependencies = getJSONDependencies(work);
+      out.print(jsonDependencies);
     } else {
       if (work.getDependency()) {
         JSONObject jsonDependencies = getJSONDependencies(work);
@@ -249,7 +300,60 @@ public int execute(DriverContext driverContext) {
     }
   }
 
-  private String indentString(int indent) {
+  private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
+      throws Exception {
+
+    BaseSemanticAnalyzer analyzer = work.getAnalyzer();
+    HiveOperation operation = analyzer.getHiveOperation();
+
+    JSONObject object = new JSONObject();
+    Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
+    if (work.isFormatted()) {
+      object.put("INPUTS", jsonInput);
+    }
+    Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
+    if (work.isFormatted()) {
+      object.put("OUTPUTS", jsonOutput);
+    }
+    String userName = SessionState.get().getAuthenticator().getUserName();
+    Object jsonUser = toJson("CURRENT_USER", userName, out, work);
+    if (work.isFormatted()) {
+      object.put("CURRENT_USER", jsonUser);
+    }
+    Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
+    if (work.isFormatted()) {
+      object.put("OPERATION", jsonOperation);
+    }
+    if (analyzer.skipAuthorization()) {
+      return object;
+    }
+    HiveAuthorizationProvider delegate = SessionState.get().getAuthorizer();
+
+    final List<String> exceptions = new ArrayList<String>();
+    HiveAuthorizationProvider authorizer = AuthorizationFactory.create(delegate,
+        new AuthorizationFactory.AuthorizationExceptionHandler() {
+          public void exception(AuthorizationException exception) {
+            exceptions.add(exception.getMessage());
+          }
+        });
+
+    SessionState.get().setAuthorizer(authorizer);
+    try {
+      Driver.doAuthorization(analyzer);
+    } finally {
+      SessionState.get().setAuthorizer(delegate);
+    }
+
+    if (!exceptions.isEmpty()) {
+      Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
+      if (work.isFormatted()) {
+        object.put("AUTHORIZATION_FAILURES", jsonFails);
+      }
+    }
+    return object;
+  }
+
+  private static String indentString(int indent) {
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < indent; ++i) {
       sb.append(" ");
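The pair of toJson() overloads is what lets collectAuthRelatedEntities() serve both output modes: under EXPLAIN FORMATTED AUTHORIZATION they return a JSON value for the enclosing object, otherwise they print a header line plus entries indented two spaces and return null. A standalone sketch of the text-mode layout (stub method, not the Hive class), matching the INPUTS/OUTPUTS sections in the expected test output at the end of this patch:

```java
import java.io.PrintStream;
import java.util.Arrays;
import java.util.List;

// Reproduces the text-mode layout of ExplainTask.toJson(header, messages, ...):
// "HEADER: " on its own line, then each entry indented by indentString(2).
public class ExplainTextRenderingSketch {

  static void printSection(String header, List<String> messages, PrintStream out) {
    out.print(header);
    out.println(": ");
    for (String message : messages) {
      out.print("  ");       // indentString(2)
      out.print(message);
      out.println();
    }
  }

  public static void main(String[] args) {
    printSection("INPUTS", Arrays.asList("default@src", "default@srcpart"), System.out);
    // INPUTS:
    //   default@src
    //   default@srcpart
  }
}
```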
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index db9fa74..75394f3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -21,8 +21,6 @@
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
 import java.sql.Date;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -49,7 +47,6 @@
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -70,6 +67,7 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -107,6 +105,8 @@
   public static int HIVE_COLUMN_ORDER_ASC = 1;
   public static int HIVE_COLUMN_ORDER_DESC = 0;
 
+  protected HiveOperation hiveOperation;
+
   /**
    * ReadEntitites that are passed to the hooks.
    */
@@ -145,6 +145,18 @@
   protected static final String PARQUETFILE_OUTPUT = MapredParquetOutputFormat.class.getName();
   protected static final String PARQUETFILE_SERDE = ParquetHiveSerDe.class.getName();
 
+  public HiveOperation getHiveOperation() {
+    return hiveOperation;
+  }
+
+  public void setHiveOperation(HiveOperation hiveOperation) {
+    this.hiveOperation = hiveOperation;
+  }
+
+  public boolean skipAuthorization() {
+    return false;
+  }
+
   class RowFormatParams {
     String fieldDelim = null;
     String fieldEscape = null;
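Because skipAuthorization() defaults to false, every existing analyzer keeps the compile-time check; an analyzer opts out only by overriding it, as ExplainSemanticAnalyzer does next in this patch. A hypothetical subclass for illustration only (not part of the patch):

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical analyzer that opts out of the compile-time privilege check.
// The only override this patch actually ships is in ExplainSemanticAnalyzer.
public class NoAuthCheckAnalyzer extends BaseSemanticAnalyzer {

  public NoAuthCheckAnalyzer(HiveConf conf) throws SemanticException {
    super(conf);
  }

  @Override
  public void analyzeInternal(ASTNode ast) throws SemanticException {
    // no-op, for illustration only
  }

  @Override
  public boolean skipAuthorization() {
    return true; // Driver.compile() will bypass doAuthorization() entirely
  }
}
```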
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 26863f1..83705d4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -48,12 +49,20 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     boolean formatted = false;
     boolean dependency = false;
     boolean logical = false;
-    if (ast.getChildCount() == 2) {
-      int explainOptions = ast.getChild(1).getType();
-      formatted = (explainOptions == HiveParser.KW_FORMATTED);
-      extended = (explainOptions == HiveParser.KW_EXTENDED);
-      dependency = (explainOptions == HiveParser.KW_DEPENDENCY);
-      logical = (explainOptions == HiveParser.KW_LOGICAL);
+    boolean authorize = false;
+    for (int i = 1; i < ast.getChildCount(); i++) {
+      int explainOptions = ast.getChild(i).getType();
+      if (explainOptions == HiveParser.KW_FORMATTED) {
+        formatted = true;
+      } else if (explainOptions == HiveParser.KW_EXTENDED) {
+        extended = true;
+      } else if (explainOptions == HiveParser.KW_DEPENDENCY) {
+        dependency = true;
+      } else if (explainOptions == HiveParser.KW_LOGICAL) {
+        logical = true;
+      } else if (explainOptions == HiveParser.KW_AUTHORIZATION) {
+        authorize = true;
+      }
     }
 
     ctx.setExplain(true);
@@ -87,11 +96,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         tasks,
         fetchTask,
         input.dump(),
-        sem.getInputs(),
+        sem,
         extended,
         formatted,
         dependency,
-        logical);
+        logical,
+        authorize);
 
     work.setAppendTaskType(
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));
@@ -106,4 +116,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
   public List<FieldSchema> getResultSchema() {
     return fieldList;
   }
+
+  @Override
+  public boolean skipAuthorization() {
+    List<Task<? extends Serializable>> rootTasks = getRootTasks();
+    assert rootTasks != null && rootTasks.size() == 1;
+    Task task = rootTasks.get(0);
+    return task instanceof ExplainTask && ((ExplainTask)task).getWork().isAuthorize();
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 38e8e25..412a046 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -295,6 +295,7 @@ KW_COMPACT: 'COMPACT';
 KW_COMPACTIONS: 'COMPACTIONS';
 KW_TRANSACTIONS: 'TRANSACTIONS';
 KW_REWRITE : 'REWRITE';
+KW_AUTHORIZATION: 'AUTHORIZATION';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index af3ecd0..ee00685 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -619,13 +619,18 @@ statement
 explainStatement
 @init { pushMsg("explain statement", state); }
 @after { popMsg(state); }
-    : KW_EXPLAIN
-      ( (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY|explainOptions=KW_LOGICAL)? execStatement
-      -> ^(TOK_EXPLAIN execStatement $explainOptions?)
-    |
-      KW_REWRITE queryStatementExpression[true] -> ^(TOK_EXPLAIN_SQ_REWRITE queryStatementExpression)
-    )
+    : KW_EXPLAIN (
+      explainOption* execStatement -> ^(TOK_EXPLAIN execStatement explainOption*)
+    |
+      KW_REWRITE queryStatementExpression[true] -> ^(TOK_EXPLAIN_SQ_REWRITE queryStatementExpression))
     ;
 
+explainOption
+@init { msgs.push("explain option"); }
+@after { msgs.pop(); }
+    : KW_EXTENDED|KW_FORMATTED|KW_DEPENDENCY|KW_LOGICAL|KW_AUTHORIZATION
+    ;
+
 execStatement
 @init { pushMsg("statement", state); }
 @after { popMsg(state); }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 5406412..9c001c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -538,5 +538,5 @@ functionIdentifier
 
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION
     ;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index b6f3748..3c14af8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -147,10 +147,33 @@
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       throws SemanticException {
+    BaseSemanticAnalyzer analyzer = getAnalyzer(conf, tree);
+
+    HiveOperation operation;
+    if (tree.getType() == HiveParser.TOK_ALTERTABLE_PARTITION) {
+      Integer type = tree.getChild(1).getType();
+      if (tree.getChild(0).getChildCount() > 1) {
+        operation = tablePartitionCommandType.get(type)[1];
+      } else {
+        operation = tablePartitionCommandType.get(type)[0];
+      }
+    } else {
+      operation = commandType.get(tree.getType());
+    }
+    analyzer.setHiveOperation(operation);
+
+    if (SessionState.get() != null) {
+      SessionState.get().setCommandType(operation);
+    }
+
+    return analyzer;
+  }
+
+  private static BaseSemanticAnalyzer getAnalyzer(HiveConf conf, ASTNode tree)
+      throws SemanticException {
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
     } else {
-      setSessionCommandType(commandType.get(tree.getToken().getType()));
-
       switch (tree.getToken().getType()) {
       case HiveParser.TOK_EXPLAIN:
@@ -232,14 +255,6 @@
         return new DDLSemanticAnalyzer(conf);
 
       case HiveParser.TOK_ALTERTABLE_PARTITION:
-        HiveOperation commandType = null;
-        Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
-        if (tree.getChild(0).getChildCount() > 1) {
-          commandType = tablePartitionCommandType.get(type)[1];
-        } else {
-          commandType = tablePartitionCommandType.get(type)[0];
-        }
-        setSessionCommandType(commandType);
         return new DDLSemanticAnalyzer(conf);
 
       case HiveParser.TOK_CREATEFUNCTION:
@@ -258,12 +273,6 @@
     }
   }
 
-  private static void setSessionCommandType(HiveOperation commandType) {
-    if (SessionState.get() != null) {
-      SessionState.get().setCommandType(commandType);
-    }
-  }
-
   private SemanticAnalyzerFactory() {
     // prevent instantiation
   }
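The factory now resolves the HiveOperation once, stamps it on the analyzer via setHiveOperation(), and still mirrors it into SessionState, so Driver.doAuthorization() and ExplainTask can read the operation from the analyzer itself. A condensed, self-contained model of the resolution logic; the token constants and lookup tables below are stand-ins for the real HiveParser/factory fields:

```java
import java.util.HashMap;
import java.util.Map;

// Condensed model of SemanticAnalyzerFactory.get(): resolve the operation from
// the AST token type, with a two-variant special case for
// TOK_ALTERTABLE_PARTITION, then attach it to the analyzer.
public class OperationResolutionSketch {

  static final int TOK_QUERY = 1, TOK_ALTERTABLE_PARTITION = 2;

  static final Map<Integer, String> commandType = new HashMap<>();
  static final Map<Integer, String[]> tablePartitionCommandType = new HashMap<>();
  static {
    commandType.put(TOK_QUERY, "QUERY");
    // [0] = table-level variant, [1] = partition-level variant
    tablePartitionCommandType.put(42, new String[] {"ALTERTABLE_X", "ALTERPARTITION_X"});
  }

  static String resolve(int treeType, int childType, boolean hasPartitionSpec) {
    if (treeType == TOK_ALTERTABLE_PARTITION) {
      String[] variants = tablePartitionCommandType.get(childType);
      return hasPartitionSpec ? variants[1] : variants[0];
    }
    return commandType.get(treeType);
  }

  public static void main(String[] args) {
    System.out.println(resolve(TOK_QUERY, -1, false));                // QUERY
    System.out.println(resolve(TOK_ALTERTABLE_PARTITION, 42, true));  // ALTERPARTITION_X
  }
}
```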
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
index d7140ca..f258d51 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 
 /**
  * ExplainWork.
@@ -49,6 +50,9 @@
   boolean appendTaskType;
 
+  boolean authorize;
+
+  private transient BaseSemanticAnalyzer analyzer;
 
   public ExplainWork() {
   }
@@ -58,21 +62,24 @@ public ExplainWork(Path resFile,
       List<Task<? extends Serializable>> rootTasks,
       Task<? extends Serializable> fetchTask,
       String astStringTree,
-      HashSet<ReadEntity> inputs,
+      BaseSemanticAnalyzer analyzer,
       boolean extended,
       boolean formatted,
       boolean dependency,
-      boolean logical) {
+      boolean logical,
+      boolean authorize) {
     this.resFile = resFile;
     this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
     this.fetchTask = fetchTask;
     this.astStringTree = astStringTree;
-    this.inputs = inputs;
+    this.analyzer = analyzer;
+    this.inputs = analyzer.getInputs();
     this.extended = extended;
     this.formatted = formatted;
     this.dependency = dependency;
     this.logical = logical;
     this.pCtx = pCtx;
+    this.authorize = authorize;
   }
 
   public Path getResFile() {
@@ -162,4 +169,16 @@ public boolean isAppendTaskType() {
   public void setAppendTaskType(boolean appendTaskType) {
     this.appendTaskType = appendTaskType;
   }
+
+  public boolean isAuthorize() {
+    return authorize;
+  }
+
+  public void setAuthorize(boolean authorize) {
+    this.authorize = authorize;
+  }
+
+  public BaseSemanticAnalyzer getAnalyzer() {
+    return analyzer;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
new file mode 100644
index 0000000..47c57db
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+
+public class AuthorizationFactory {
+
+  public static HiveAuthorizationProvider create(HiveAuthorizationProvider delegated) {
+    return create(delegated, new DefaultAuthorizationExceptionHandler());
+  }
+
+  public static HiveAuthorizationProvider create(final HiveAuthorizationProvider delegated,
+      final AuthorizationExceptionHandler handler) {
+
+    InvocationHandler invocation = new InvocationHandler() {
+      public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+        invokeAuth(method, args);
+        return null;
+      }
+
+      private void invokeAuth(Method method, Object[] args) throws Throwable {
+        try {
+          method.invoke(delegated, args);
+        } catch (InvocationTargetException e) {
+          if (e.getTargetException() instanceof AuthorizationException) {
+            handler.exception((AuthorizationException) e.getTargetException());
+          }
+        }
+      }
+    };
+
+    return (HiveAuthorizationProvider)Proxy.newProxyInstance(
+        AuthorizationFactory.class.getClassLoader(),
+        new Class[] {HiveAuthorizationProvider.class},
+        invocation);
+  }
+
+  public static interface AuthorizationExceptionHandler {
+    void exception(AuthorizationException exception) throws AuthorizationException;
+  }
+
+  public static class DefaultAuthorizationExceptionHandler
+      implements AuthorizationExceptionHandler {
+    public void exception(AuthorizationException exception) {
+      throw exception;
+    }
+  }
+}
diff --git ql/src/test/queries/clientpositive/authorization_explain.q ql/src/test/queries/clientpositive/authorization_explain.q
new file mode 100644
index 0000000..22bc894
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_explain.q
@@ -0,0 +1,7 @@
+set hive.security.authorization.enabled=true;
+
+explain authorization select * from src join srcpart;
+explain formatted authorization select * from src join srcpart;
+
+explain authorization use default;
+explain formatted authorization use default;
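AuthorizationFactory is the piece that makes EXPLAIN AUTHORIZATION non-fatal: it wraps any HiveAuthorizationProvider in a java.lang.reflect.Proxy whose handler forwards each call to the delegate and routes AuthorizationExceptions to a pluggable handler instead of letting them propagate. A trimmed usage sketch mirroring ExplainTask.collectAuthRelatedEntities() above (the delegate would normally come from SessionState.get().getAuthorizer()):

```java
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

public class CollectFailuresSketch {

  // Wraps 'delegate' so every authorize(...) call that fails is recorded in
  // 'failures' and the remaining checks keep running, as ExplainTask does.
  static HiveAuthorizationProvider collecting(
      HiveAuthorizationProvider delegate, final List<String> failures) {
    return AuthorizationFactory.create(delegate,
        new AuthorizationFactory.AuthorizationExceptionHandler() {
          public void exception(AuthorizationException e) {
            failures.add(e.getMessage());
          }
        });
  }

  public static void main(String[] args) {
    List<String> failures = new ArrayList<String>();
    // HiveAuthorizationProvider proxy = collecting(realAuthorizer, failures);
    // ... run the checks, then report 'failures' instead of aborting.
  }
}
```

Because the proxy's invoke() always returns null, this wrapping is only safe for interfaces whose methods return void or reference types, which holds for HiveAuthorizationProvider.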
diff --git ql/src/test/results/clientpositive/authorization_explain.q.out ql/src/test/results/clientpositive/authorization_explain.q.out
new file mode 100644
index 0000000..3aeb170
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_explain.q.out
@@ -0,0 +1,42 @@
+Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS:
+  default@srcpart
+  default@srcpart@ds=2008-04-08/hr=11
+  default@srcpart@ds=2008-04-08/hr=12
+  default@srcpart@ds=2008-04-09/hr=11
+  default@srcpart@ds=2008-04-09/hr=12
+  default@src
+OUTPUTS:
+#### A masked pattern was here ####
+CURRENT_USER:
+  hive_test_user
+OPERATION:
+  QUERY
+AUTHORIZATION_FAILURES:
+  No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
+  No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain formatted authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorization use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS:
+OUTPUTS:
+CURRENT_USER:
+  hive_test_user
+OPERATION:
+  SWITCHDATABASE
+PREHOOK: query: explain formatted authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorization use default
+POSTHOOK: type: SWITCHDATABASE
+{"OUTPUTS":[],"INPUTS":[],"OPERATION":"SWITCHDATABASE","CURRENT_USER":"hive_test_user"}