Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 1722)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -137,6 +137,23 @@
 TOK_HINTARGLIST;
 TOK_USERSCRIPTCOLNAMES;
 TOK_USERSCRIPTCOLSCHEMA;
+TOK_CREATEUSER;
+TOK_DROPUSER;
+TOK_USER;
+TOK_USRLIST;
+TOK_ALLTABLEREF;
+TOK_GRANT;
+TOK_REVOKE;
+TOK_PRIVLIST;
+TOK_PRIVALL;
+TOK_PRIVSEL;
+TOK_PRIVINS;
+TOK_PRIVCRT;
+TOK_PRIVALT;
+TOK_PRIVDROP;
+TOK_PRIVCREATEUSER;
+TOK_PRIVGRANT;
+TOK_WITHOPTS;
 }
@@ -176,6 +193,7 @@
     : queryStatementExpression
     | loadStatement
     | ddlStatement
+    | dclStatement
     ;

 loadStatement
@@ -196,8 +214,17 @@
     | metastoreCheck
     | createFunctionStatement
     | dropFunctionStatement
+    | createUserStatement
+    | dropUserStatement
     ;

+dclStatement
+@init { msgs.push("dcl statement"); }
+@after { msgs.pop(); }
+    : grantStatement
+    | revokeStatement
+    ;
+
 ifNotExists
 @init { msgs.push("if not exists clause"); }
 @after { msgs.pop(); }
@@ -337,6 +364,27 @@
     -> ^(TOK_DROPFUNCTION Identifier)
     ;

+createUserStatement
+@init { msgs.push("create user statement"); }
+@after { msgs.pop(); }
+    : KW_CREATE KW_USER userItem (COMMA userItem)*
+    -> ^(TOK_CREATEUSER userItem+)
+    ;
+
+userItem
+@init { msgs.push("user item"); }
+@after { msgs.pop(); }
+    : (Identifier (KW_IDENTIFIED KW_BY StringLiteral)?)
+    -> ^(TOK_USER Identifier StringLiteral?)
+    ;
+
+dropUserStatement
+@init { msgs.push("drop user statement"); }
+@after { msgs.pop(); }
+    : KW_DROP KW_USER Identifier (COMMA Identifier)*
+    -> ^(TOK_DROPUSER Identifier+)
+    ;
+
 showStmtIdentifier
 @init { msgs.push("identifier for show statement"); }
 @after { msgs.pop(); }
@@ -1147,6 +1195,60 @@
     Identifier EQUAL constant -> ^(TOK_PARTVAL Identifier constant)
     ;

+//------------------------------------ Rules for DCL -------------------------------------
+
+grantStatement
+@init { msgs.push("grant statement"); }
+@after { msgs.pop(); }
+    :
+    KW_GRANT privilegeList KW_ON grantRevokeTarget KW_TO userList (KW_WITH withOptionList)?
+    -> ^(TOK_GRANT privilegeList grantRevokeTarget userList withOptionList?)
+    ;
+
+privilegeList
+@init { msgs.push("privilege list"); }
+@after { msgs.pop(); }
+    : privilegeType (COMMA privilegeType)* -> ^(TOK_PRIVLIST privilegeType+)
+    ;
+
+privilegeType
+    : KW_ALL KW_PRIVILEGES? -> TOK_PRIVALL
+    | KW_SELECT -> TOK_PRIVSEL
+    | KW_INSERT -> TOK_PRIVINS
+    | KW_CREATE -> TOK_PRIVCRT
+    | KW_ALTER -> TOK_PRIVALT
+    | KW_DROP -> TOK_PRIVDROP
+    | KW_CREATE KW_USER -> TOK_PRIVCREATEUSER
+    | KW_GRANT KW_OPTION -> TOK_PRIVGRANT
+    ;
+
+userList
+@init { msgs.push("user list"); }
+@after { msgs.pop(); }
+    : userItem (COMMA userItem)* -> ^(TOK_USRLIST userItem+)
+    ;
+
+grantRevokeTarget
+    : STAR -> TOK_ALLTABLEREF
+    | Identifier
+    ;
+
+withOptionList
+    : withOption (COMMA withOption)* -> ^(TOK_WITHOPTS withOption+)
+    ;
+
+withOption
+    : KW_GRANT KW_OPTION -> TOK_PRIVGRANT
+    ;
+
+revokeStatement
+@init { msgs.push("revoke statement"); }
+@after { msgs.pop(); }
+    :
+    KW_REVOKE privilegeList KW_ON grantRevokeTarget KW_FROM userList
+    -> ^(TOK_REVOKE privilegeList grantRevokeTarget userList)
+    ;
+
 // Keywords
 KW_TRUE : 'TRUE';
 KW_FALSE : 'FALSE';
@@ -1307,6 +1409,11 @@
 KW_CONTINUE: 'CONTINUE';
 KW_CURSOR: 'CURSOR';
 KW_TRIGGER: 'TRIGGER';
+KW_USER: 'USER';
+KW_IDENTIFIED: 'IDENTIFIED';
+KW_PASSWORD: 'PASSWORD';
+KW_OPTION: 'OPTION';
+KW_PRIVILEGES: 'PRIVILEGES';

 // Operators
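For reference, the rules above are intended to accept DCL statements of the following shape (illustrative examples only; the user and table names are invented):

    CREATE USER alice IDENTIFIED BY 'secret', bob
    DROP USER alice, bob
    GRANT SELECT, INSERT ON src TO alice WITH GRANT OPTION
    GRANT ALL PRIVILEGES ON * TO bob
    REVOKE ALL PRIVILEGES ON * FROM bob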
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(revision 1722)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(working copy)
@@ -50,6 +50,12 @@
     case HiveParser.TOK_CREATEFUNCTION:
     case HiveParser.TOK_DROPFUNCTION:
       return new FunctionSemanticAnalyzer(conf);
+    case HiveParser.TOK_CREATEUSER:
+    case HiveParser.TOK_DROPUSER:
+      return new UserSemanticAnalyzer(conf);
+    case HiveParser.TOK_GRANT:
+    case HiveParser.TOK_REVOKE:
+      return new DCLSemanticAnalyzer(conf);
     default:
       return new SemanticAnalyzer(conf);
     }
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/UserSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/UserSemanticAnalyzer.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/UserSemanticAnalyzer.java	(revision 0)
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class UserSemanticAnalyzer extends BaseSemanticAnalyzer {
+  private static final Log LOG =
+      LogFactory.getLog("hive.ql.parse.UserSemanticAnalyzer");
+
+  public UserSemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+  }
+
+  public void analyzeInternal(ASTNode ast) throws SemanticException {
+    if (ast.getToken().getType() == HiveParser.TOK_CREATEUSER)
+      analyzeCreateUser(ast);
+    else if (ast.getToken().getType() == HiveParser.TOK_DROPUSER)
+      analyzeDropUser(ast);
+
+    LOG.info("analyze done");
+  }
+
+  private void analyzeCreateUser(ASTNode ast)
+      throws SemanticException {
+    // Each child is ^(TOK_USER Identifier StringLiteral?).
+    for (int i = 0; i < ast.getChildCount(); ++i) {
+      ASTNode child = (ASTNode) ast.getChild(i);
+      String userName = child.getChild(0).getText();
+      LOG.info("create user " + userName);
+      if (child.getChildCount() >= 2) {
+        // Extracted but not yet used; see the TODO below. Note that the
+        // StringLiteral token text still carries its surrounding quotes.
+        String password = child.getChild(1).getText();
+      }
+    }
+    // TODO: implement CREATE USER
+  }
+
+  private void analyzeDropUser(ASTNode ast)
+      throws SemanticException {
+    for (int i = 0; i < ast.getChildCount(); ++i) {
+      String userName = ast.getChild(i).getText();
+      LOG.info("drop user " + userName);
+    }
+    // TODO: implement DROP USER
+    // dropUserDesc desc = new dropUserDesc(userName);
+    // rootTasks.add(TaskFactory.get(new UserWork(desc), conf));
+  }
+}
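The commented-out lines in analyzeDropUser hint at the usual Hive pattern of a plan descriptor handed to a task via TaskFactory. A minimal sketch of what such a descriptor could look like is below; the class name dropUserDesc is taken from the TODO comment, and the whole class is a hypothetical illustration, not part of this patch:

    package org.apache.hadoop.hive.ql.plan;

    import java.io.Serializable;

    /**
     * Hypothetical plan descriptor for DROP USER, following the lowercase
     * naming of the existing desc classes (e.g. dropFunctionDesc).
     */
    public class dropUserDesc implements Serializable {
      private static final long serialVersionUID = 1L;

      // Name of the user to drop, taken from the DROP USER AST.
      private String userName;

      public dropUserDesc(String userName) {
        this.userName = userName;
      }

      public String getUserName() {
        return userName;
      }

      public void setUserName(String userName) {
        this.userName = userName;
      }
    }

A matching UserWork class and a task that actually touches the metastore would still be needed before the analyzer can add anything to rootTasks.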
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DCLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DCLSemanticAnalyzer.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DCLSemanticAnalyzer.java	(revision 0)
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class DCLSemanticAnalyzer extends BaseSemanticAnalyzer {
+  private static final Log LOG =
+      LogFactory.getLog("hive.ql.parse.DCLSemanticAnalyzer");
+
+  public DCLSemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+  }
+
+  public void analyzeInternal(ASTNode ast) throws SemanticException {
+    if (ast.getToken().getType() == HiveParser.TOK_GRANT)
+      analyzeGrant(ast);
+    else if (ast.getToken().getType() == HiveParser.TOK_REVOKE)
+      analyzeRevoke(ast);
+
+    LOG.info("analyze done");
+  }
+
+  private void analyzeGrant(ASTNode ast)
+      throws SemanticException {
+    // TODO: implement GRANT statement
+  }
+
+  private void analyzeRevoke(ASTNode ast)
+      throws SemanticException {
+    // TODO: implement REVOKE statement
+  }
+}
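As a rough sketch of how the TODO stub above might consume the tree built by the grantStatement rewrite (child order: privilegeList, grantRevokeTarget, userList, optional withOptionList), here is one possible analyzeGrant body. It only walks and logs the tree, relies on nothing beyond what DCLSemanticAnalyzer already imports, and is an illustration rather than the patch's implementation:

      private void analyzeGrant(ASTNode ast) throws SemanticException {
        // ^(TOK_GRANT privilegeList grantRevokeTarget userList withOptionList?)
        ASTNode privs  = (ASTNode) ast.getChild(0); // TOK_PRIVLIST
        ASTNode target = (ASTNode) ast.getChild(1); // TOK_ALLTABLEREF or Identifier
        ASTNode users  = (ASTNode) ast.getChild(2); // TOK_USRLIST
        boolean withGrantOption = ast.getChildCount() > 3
            && ((ASTNode) ast.getChild(3)).getToken().getType() == HiveParser.TOK_WITHOPTS;

        boolean onAllTables = target.getToken().getType() == HiveParser.TOK_ALLTABLEREF;
        String targetName = onAllTables ? "*" : target.getText();

        for (int i = 0; i < privs.getChildCount(); ++i) {
          int privToken = ((ASTNode) privs.getChild(i)).getToken().getType();
          for (int j = 0; j < users.getChildCount(); ++j) {
            // Each user child is ^(TOK_USER Identifier StringLiteral?).
            String userName = users.getChild(j).getChild(0).getText();
            LOG.info("grant privilege token " + privToken + " on " + targetName
                + " to " + userName + (withGrantOption ? " with grant option" : ""));
          }
        }
        // TODO: turn the collected (privilege, target, user) triples into a
        // descriptor/work pair, mirroring the pattern sketched for DROP USER.
      }

analyzeRevoke could follow the same structure without the with-grant-option child, since the revokeStatement rewrite produces ^(TOK_REVOKE privilegeList grantRevokeTarget userList).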