diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java index 547b3515c0..46eb092243 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java @@ -23,10 +23,10 @@ import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; /** * DDL task description for ALTER DATABASE commands. diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java index 29dc266ebf..848bfab6ad 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java @@ -33,6 +33,8 @@ public class ShowCreateDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "createdb_stmt#string"; + static { DDLTask2.registerOperation(ShowCreateDatabaseDesc.class, ShowCreateDatabaseOperation.class); } @@ -40,11 +42,6 @@ private final String resFile; private final String dbName; - /** - * Thrift ddl for the result of showcreatedatabase. 
- */ - public static final String SCHEMA = "createdb_stmt#string"; - public ShowCreateDatabaseDesc(String dbName, String resFile) { this.dbName = dbName; this.resFile = resFile; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java index 4814fd3e8c..e8f4e442d4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java @@ -33,7 +33,6 @@ public class ShowDatabasesDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - /** Thrift ddl for the result of show databases. */ public static final String SHOW_DATABASES_SCHEMA = "database_name#string"; static { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java index 7f1aa0c90e..b84c630149 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java @@ -33,18 +33,12 @@ public class DescFunctionDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "tab_name#string"; + static { DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class); } - /** - * Thrift ddl for the result of show tables. 
- */ - private static final String SCHEMA = "tab_name#string"; - public static String getSchema() { - return SCHEMA; - } - private final String resFile; private final String name; private final boolean isExtended; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java index 2affa32786..79074e8550 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java @@ -33,18 +33,12 @@ public class ShowFunctionsDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "tab_name#string"; + static { DDLTask2.registerOperation(ShowFunctionsDesc.class, ShowFunctionsOperation.class); } - /** - * Thrift ddl for the result of show tables. - */ - private static final String SCHEMA = "tab_name#string"; - public static String getSchema() { - return SCHEMA; - } - private final String resFile; private final String pattern; private final boolean isLikePattern; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java index d76312d691..d23899c41e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java @@ -54,14 +54,14 @@ public int execute() throws HiveException { private Set fetchFunctions() { Set funcs = null; if (desc.getPattern() != null) { - LOG.debug("pattern: {}", desc.getPattern()); if (desc.getIsLikePattern()) { funcs = FunctionRegistry.getFunctionNamesByLikePattern(desc.getPattern()); } else { context.getConsole().printInfo("SHOW FUNCTIONS is deprecated, please use SHOW FUNCTIONS LIKE instead."); funcs = FunctionRegistry.getFunctionNames(desc.getPattern()); } - LOG.info("Found {} 
function(s) matching the SHOW FUNCTIONS statement.", funcs.size()); + LOG.info("Found {} function(s) using pattern {} matching the SHOW FUNCTIONS statement.", funcs.size(), + desc.getPattern()); } else { funcs = FunctionRegistry.getFunctionNames(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java new file mode 100644 index 0000000000..9641682d95 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for CREATE ROLE commands. 
+ */ +@Explain(displayName = "Create Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class CreateRoleDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(CreateRoleDesc.class, CreateRoleOperation.class); + } + + private final String name; + + public CreateRoleDesc(String name) { + this.name = name; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getName() { + return name; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java new file mode 100644 index 0000000000..6782b02d20 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; + +/** + * Operation process of creating a role. + */ +public class CreateRoleOperation extends DDLOperation { + private final CreateRoleDesc desc; + + public CreateRoleOperation(DDLOperationContext context, CreateRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + authorizer.createRole(desc.getName(), null); + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java new file mode 100644 index 0000000000..b8dcaacad7 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for DROP ROLE commands. + */ +@Explain(displayName = "Drop Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class DropRoleDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(DropRoleDesc.class, DropRoleOperation.class); + } + + private final String name; + + public DropRoleDesc(String name) { + this.name = name; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getName() { + return name; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java new file mode 100644 index 0000000000..e8b55ecf4c --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; + +/** + * Operation process of dropping a role. + */ +public class DropRoleOperation extends DDLOperation { + private final DropRoleDesc desc; + + public DropRoleOperation(DDLOperationContext context, DropRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + authorizer.dropRole(desc.getName()); + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java similarity index 56% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java index b5f9a69093..e641bf7b33 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java @@ -16,118 +16,70 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; import java.io.Serializable; import java.util.List; import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; - +/** + * DDL task description for GRANT commands. 
+ */ @Explain(displayName = "Grant", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class GrantDesc extends DDLDesc implements Serializable, Cloneable { - +public class GrantDesc implements DDLDesc, Serializable, Cloneable { private static final long serialVersionUID = 1L; - private List privileges; - - private List principals; - - private boolean grantOption; - - private String grantor; - - private PrincipalType grantorType; + static { + DDLTask2.registerOperation(GrantDesc.class, GrantOperation.class); + } - private PrivilegeObjectDesc privilegeSubjectDesc; + private final PrivilegeObjectDesc privilegeSubject; + private final List privileges; + private final List principals; + private final String grantor; + private final PrincipalType grantorType; + private final boolean grantOption; - public GrantDesc(PrivilegeObjectDesc privilegeSubject, - List privilegeDesc, List principalDesc, + public GrantDesc(PrivilegeObjectDesc privilegeSubject, List privileges, List principals, String grantor, PrincipalType grantorType, boolean grantOption) { - super(); - this.privilegeSubjectDesc = privilegeSubject; - this.privileges = privilegeDesc; - this.principals = principalDesc; + this.privilegeSubject = privilegeSubject; + this.privileges = privileges; + this.principals = principals; this.grantor = grantor; this.grantorType = grantorType; this.grantOption = grantOption; } - /** - * @return privileges - */ + @Explain(displayName="Privilege subject", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public PrivilegeObjectDesc getPrivilegeSubject() { + return privilegeSubject; + } + @Explain(displayName = "Privileges", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public List getPrivileges() { return privileges; } - /** - * @param privileges - */ - public void setPrivileges(List privileges) { - this.privileges = privileges; - } - - /** - * @return principals - */ @Explain(displayName = "Principals", explainLevels = { 
Level.USER, Level.DEFAULT, Level.EXTENDED }) public List getPrincipals() { return principals; } - /** - * @param principals - */ - public void setPrincipals(List principals) { - this.principals = principals; - } - - /** - * @return grant option - */ - @Explain(displayName = "grant option", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public boolean isGrantOption() { - return grantOption; - } - - /** - * @param grantOption - */ - public void setGrantOption(boolean grantOption) { - this.grantOption = grantOption; - } - - /** - * @return privilege subject - */ - @Explain(displayName="privilege subject", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public PrivilegeObjectDesc getPrivilegeSubjectDesc() { - return privilegeSubjectDesc; - } - - /** - * @param privilegeSubjectDesc - */ - public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) { - this.privilegeSubjectDesc = privilegeSubjectDesc; - } - public String getGrantor() { return grantor; } - public void setGrantor(String grantor) { - this.grantor = grantor; - } - public PrincipalType getGrantorType() { return grantorType; } - public void setGrantorType(PrincipalType grantorType) { - this.grantorType = grantorType; + @Explain(displayName = "grant option", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public boolean isGrantOption() { + return grantOption; } - } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java new file mode 100644 index 0000000000..633ac434e0 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; + +/** + * Operation process of granting. 
+ */ +public class GrantOperation extends DDLOperation { + private final GrantDesc desc; + + public GrantOperation(DDLOperationContext context, GrantDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + + //Convert to object types used by the authorization plugin interface + List hivePrincipals = AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), + RoleUtils.getAuthorizationTranslator(authorizer)); + List hivePrivileges = AuthorizationUtils.getHivePrivileges(desc.getPrivileges(), + RoleUtils.getAuthorizationTranslator(authorizer)); + HivePrivilegeObject hivePrivilegeObject = + RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getPrivilegeSubject()); + HivePrincipal grantorPrincipal = new HivePrincipal(desc.getGrantor(), + AuthorizationUtils.getHivePrincipalType(desc.getGrantorType())); + + authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivilegeObject, grantorPrincipal, + desc.isGrantOption()); + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java new file mode 100644 index 0000000000..e27931a478 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for GRANT ROLE commands. + */ +@Explain(displayName="Grant roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class GrantRoleDesc implements DDLDesc { + + static { + DDLTask2.registerOperation(GrantRoleDesc.class, GrantRoleOperation.class); + } + + private final List roles; + private final List principals; + private final String grantor; + private final boolean grantOption; + + public GrantRoleDesc(List roles, List principals, String grantor, boolean grantOption) { + this.principals = principals; + this.roles = roles; + this.grantor = grantor; + this.grantOption = grantOption; + } + + @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public List getPrincipals() { + return principals; + } + + @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public List getRoles() { + return roles; + } + + public String getGrantor() { + return grantor; + } + + public boolean isGrantOption() { + return grantOption; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java new file mode 100644 index 0000000000..e4a833e31a --- 
/dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.util.List; + +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; + +/** + * Operation process of granting a role. 
+ */ +public class GrantRoleOperation extends DDLOperation { + private final GrantRoleDesc desc; + + public GrantRoleOperation(DDLOperationContext context, GrantRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + + List principals = + AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), RoleUtils.getAuthorizationTranslator(authorizer)); + HivePrincipal grantorPrincipal = null; + if (desc.getGrantor() != null) { + grantorPrincipal = + new HivePrincipal(desc.getGrantor(), AuthorizationUtils.getHivePrincipalType(PrincipalType.USER)); + } + + authorizer.grantRole(principals, desc.getRoles(), desc.isGrantOption(), grantorPrincipal); + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java similarity index 84% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java index 1d82b1902c..9c7b095edb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrincipalDesc.java @@ -16,49 +16,36 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; import java.io.Serializable; import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; - +/** + * Represents a database principal. 
+ */ @Explain(displayName = "Principal", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class PrincipalDesc implements Serializable, Cloneable { - private static final long serialVersionUID = 1L; - private String name; - - private PrincipalType type; + private final String name; + private final PrincipalType type; public PrincipalDesc(String name, PrincipalType type) { - super(); this.name = name; this.type = type; } - public PrincipalDesc() { - super(); - } - @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getName() { return name; } - public void setName(String name) { - this.name = name; - } - @Explain(displayName="type", explainLevels = { Level.EXTENDED }) public PrincipalType getType() { return type; } - - public void setType(PrincipalType type) { - this.type = type; - } - } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java similarity index 76% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java index 1cb328a845..5fad35462a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeDesc.java @@ -16,22 +16,24 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; import java.io.Serializable; import java.util.List; import org.apache.hadoop.hive.ql.security.authorization.Privilege; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; - +/** + * Represents a database privilege. 
+ */ @Explain(displayName = "Privilege", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class PrivilegeDesc implements Serializable, Cloneable { private static final long serialVersionUID = 1L; - private Privilege privilege; - - private List columns; + private final Privilege privilege; + private final List columns; public PrivilegeDesc(Privilege privilege, List columns) { super(); @@ -39,38 +41,13 @@ public PrivilegeDesc(Privilege privilege, List columns) { this.columns = columns; } - public PrivilegeDesc() { - super(); - } - - /** - * @return privilege definition - */ @Explain(displayName = "privilege", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public Privilege getPrivilege() { return privilege; } - /** - * @param privilege - */ - public void setPrivilege(Privilege privilege) { - this.privilege = privilege; - } - - /** - * @return columns on which the given privilege take affect. - */ @Explain(displayName = "columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public List getColumns() { return columns; } - - /** - * @param columns - */ - public void setColumns(List columns) { - this.columns = columns; - } - } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java similarity index 71% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java index f18a51b998..bc08c694a8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/PrivilegeObjectDesc.java @@ -16,34 +16,30 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; import java.util.HashMap; import java.util.List; -import org.apache.hadoop.hive.ql.plan.Explain.Level; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; +/** + * Represents a privilege object. + */ @Explain(displayName="privilege subject", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class PrivilegeObjectDesc { - //default type is table - private boolean table = true; - - private String object; - - private HashMap partSpec; - - private List columns; + private final boolean table; + private final String object; + private final HashMap partSpec; + private final List columns; - public PrivilegeObjectDesc(boolean isTable, String object, - HashMap partSpec) { - super(); + public PrivilegeObjectDesc(boolean isTable, String object, HashMap partSpec, List columns) { this.table = isTable; this.object = object; this.partSpec = partSpec; - } - - public PrivilegeObjectDesc() { + this.columns = columns; } @Explain(displayName="is table") @@ -51,33 +47,17 @@ public boolean getTable() { return table; } - public void setTable(boolean isTable) { - this.table = isTable; - } - @Explain(displayName="object", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getObject() { return object; } - public void setObject(String object) { - this.object = object; - } - @Explain(displayName="partition spec", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public HashMap getPartSpec() { return partSpec; } - public void setPartSpec(HashMap partSpec) { - this.partSpec = partSpec; - } - public List getColumns() { return columns; } - - public void setColumns(List columns) { - this.columns = columns; - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java similarity index 52% rename from 
ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java index 0e0db1f22c..39ec28df4b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java @@ -16,40 +16,37 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; import java.io.Serializable; import java.util.List; -import org.apache.hadoop.hive.ql.plan.Explain.Level; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; +/** + * DDL task description for REVOKE commands. + */ @Explain(displayName="Revoke", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class RevokeDesc extends DDLDesc implements Serializable, Cloneable { - +public class RevokeDesc implements DDLDesc, Serializable, Cloneable { private static final long serialVersionUID = 1L; - private List<PrivilegeDesc> privileges; - - private List<PrincipalDesc> principals; - - private PrivilegeObjectDesc privilegeSubjectDesc; - - private boolean grantOption; - - public RevokeDesc(){ + static { + DDLTask2.registerOperation(RevokeDesc.class, RevokeOperation.class); } - public RevokeDesc(List<PrivilegeDesc> privileges, - List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc) { - this(privileges, principals, privilegeSubjectDesc, false); - } + private final List<PrivilegeDesc> privileges; + private final List<PrincipalDesc> principals; + private final PrivilegeObjectDesc privilegeSubject; + private final boolean grantOption; - public RevokeDesc(List<PrivilegeDesc> privileges, - List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc, boolean grantOption) { - super(); + public RevokeDesc(List<PrivilegeDesc> privileges, List<PrincipalDesc> principals, + PrivilegeObjectDesc privilegeSubject, boolean grantOption) { this.privileges = privileges; this.principals = principals; - 
this.privilegeSubjectDesc = privilegeSubjectDesc; + this.privilegeSubject = privilegeSubject; this.grantOption = grantOption; } @@ -57,32 +54,15 @@ public RevokeDesc(List<PrivilegeDesc> privileges, return privileges; } - public void setPrivileges(List<PrivilegeDesc> privileges) { - this.privileges = privileges; - } - public List<PrincipalDesc> getPrincipals() { return principals; } - public void setPrincipals(List<PrincipalDesc> principals) { - this.principals = principals; - } - - public PrivilegeObjectDesc getPrivilegeSubjectDesc() { - return privilegeSubjectDesc; - } - - public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) { - this.privilegeSubjectDesc = privilegeSubjectDesc; + public PrivilegeObjectDesc getPrivilegeSubject() { - return privilegeSubject; + return privilegeSubject; } public boolean isGrantOption() { return grantOption; } - - public void setGrantOption(boolean grantOption) { - this.grantOption = grantOption; - } - } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java new file mode 100644 index 0000000000..bf4e01a191 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; + +/** + * Operation process of revoking. + */ +public class RevokeOperation extends DDLOperation { + private final RevokeDesc desc; + + public RevokeOperation(DDLOperationContext context, RevokeDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + + // Convert to object types used by the authorization plugin interface + List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), + RoleUtils.getAuthorizationTranslator(authorizer)); + List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(desc.getPrivileges(), + RoleUtils.getAuthorizationTranslator(authorizer)); + HivePrivilegeObject hivePrivilegeObject = + RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getPrivilegeSubject()); + HivePrincipal grantorPrincipal = new HivePrincipal(null, null); + + authorizer.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivilegeObject, grantorPrincipal, + desc.isGrantOption()); + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java
ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java new file mode 100644 index 0000000000..05507ec905 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for REVOKE ROLE commands. 
+ */ +@Explain(displayName="Revoke roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class RevokeRoleDesc implements DDLDesc { + + static { + DDLTask2.registerOperation(RevokeRoleDesc.class, RevokeRoleOperation.class); + } + + private final List<String> roles; + private final List<PrincipalDesc> principals; + private final String grantor; + private final boolean grantOption; + + public RevokeRoleDesc(List<String> roles, List<PrincipalDesc> principals, String grantor, boolean grantOption) { + this.principals = principals; + this.roles = roles; + this.grantor = grantor; + this.grantOption = grantOption; + } + + @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public List<PrincipalDesc> getPrincipals() { + return principals; + } + + @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public List<String> getRoles() { + return roles; + } + + public String getGrantor() { + return grantor; + } + + public boolean isGrantOption() { + return grantOption; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java new file mode 100644 index 0000000000..0b3b27695d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.util.List; + +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; + +/** + * Operation process of revoking a role. 
+ */ +public class RevokeRoleOperation extends DDLOperation { + private final RevokeRoleDesc desc; + + public RevokeRoleOperation(DDLOperationContext context, RevokeRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + + List<HivePrincipal> principals = + AuthorizationUtils.getHivePrincipals(desc.getPrincipals(), RoleUtils.getAuthorizationTranslator(authorizer)); + HivePrincipal grantorPrincipal = null; + if (desc.getGrantor() != null) { + grantorPrincipal = + new HivePrincipal(desc.getGrantor(), AuthorizationUtils.getHivePrincipalType(PrincipalType.USER)); + } + + authorizer.revokeRole(principals, desc.getRoles(), desc.isGrantOption(), grantorPrincipal); + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java new file mode 100644 index 0000000000..2565d471f1 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RoleUtils.java @@ -0,0 +1,53 @@ +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; +import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Common utilities for Role related ddl operations.
+ */ +class RoleUtils { + static HiveAuthorizer getSessionAuthorizer(HiveConf conf) { + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + if (authorizer == null) { + authorizer = new HiveV1Authorizer(conf); + } + + return authorizer; + } + + static void writeListToFileAfterSort(List<String> entries, String resFile, DDLOperationContext context) + throws IOException { + Collections.sort(entries); + + StringBuilder sb = new StringBuilder(); + for (String entry : entries) { + DDLUtils.appendNonNull(sb, entry, true); + } + + DDLUtils.writeToFile(sb.toString(), resFile, context); + } + + private static final HiveAuthorizationTranslator DEFAULT_AUTHORIZATION_TRANSLATOR = + new DefaultHiveAuthorizationTranslator(); + + static HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer) + throws HiveAuthzPluginException { + if (authorizer.getHiveAuthorizationTranslator() == null) { + return DEFAULT_AUTHORIZATION_TRANSLATOR; + } else { + return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator(); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java new file mode 100644 index 0000000000..e3e1bb1a2a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SET ROLE commands. + */ +@Explain(displayName = "Set Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class SetRoleDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(SetRoleDesc.class, SetRoleOperation.class); + } + + private final String name; + + public SetRoleDesc(String name) { + this.name = name; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getName() { + return name; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java new file mode 100644 index 0000000000..d119fe4a28 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; + +/** + * Operation process of setting a role. + */ +public class SetRoleOperation extends DDLOperation { + private final SetRoleDesc desc; + + public SetRoleOperation(DDLOperationContext context, SetRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + authorizer.setCurrentRole(desc.getName()); + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java new file mode 100644 index 0000000000..ec3e60866a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SHOW CURRENT ROLE commands. + */ +@Explain(displayName = "Show Current Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowCurrentRoleDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(ShowCurrentRoleDesc.class, ShowCurrentRoleOperation.class); + } + + private final String resFile; + + public ShowCurrentRoleDesc(String resFile) { + this.resFile = resFile; + } + + public String getResFile() { + return resFile; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java new file mode 100644 index 0000000000..9738ddbcc0 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; + +/** + * Operation process of showing the current role. 
+ */ +public class ShowCurrentRoleOperation extends DDLOperation { + private final ShowCurrentRoleDesc desc; + + public ShowCurrentRoleOperation(DDLOperationContext context, ShowCurrentRoleDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException, IOException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + List roleNames = authorizer.getCurrentRoleNames(); + RoleUtils.writeListToFileAfterSort(roleNames, desc.getResFile(), context); + + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java similarity index 62% rename from ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java index 23d786f9f4..e3b33b48ea 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java @@ -15,48 +15,39 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hive.ql.plan; +package org.apache.hadoop.hive.ql.ddl.privilege; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; - -@Explain(displayName="show grant desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class ShowGrantDesc { - - private PrincipalDesc principalDesc; - - private PrivilegeObjectDesc hiveObj; - - private String resFile; - - /** - * thrift ddl for the result of show grant. - */ - private static final String tabularSchema = +/** + * DDL task description for SHOW GRANT commands. 
+ */ +@Explain(displayName="Show grant desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowGrantDesc implements DDLDesc { + public static final String SCHEMA = "database,table,partition,column,principal_name,principal_type,privilege," + "grant_option,grant_time,grantor#" + "string:string:string:string:string:string:string:boolean:bigint:string"; - public ShowGrantDesc(){ - } - - public ShowGrantDesc(String resFile, PrincipalDesc principalDesc, - PrivilegeObjectDesc subjectObj) { - this.resFile = resFile; - this.principalDesc = principalDesc; - this.hiveObj = subjectObj; + static { + DDLTask2.registerOperation(ShowGrantDesc.class, ShowGrantOperation.class); } - public static String getSchema() { - return tabularSchema; + private final String resFile; + private final PrincipalDesc principal; + private final PrivilegeObjectDesc hiveObj; + + public ShowGrantDesc(String resFile, PrincipalDesc principal, PrivilegeObjectDesc hiveObj) { + this.resFile = resFile; + this.principal = principal; + this.hiveObj = hiveObj; } @Explain(displayName="principal desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public PrincipalDesc getPrincipalDesc() { - return principalDesc; - } - - public void setPrincipalDesc(PrincipalDesc principalDesc) { - this.principalDesc = principalDesc; + return principal; } @Explain(displayName="object", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -64,15 +55,7 @@ public PrivilegeObjectDesc getHiveObj() { return hiveObj; } - public void setHiveObj(PrivilegeObjectDesc subjectObj) { - this.hiveObj = subjectObj; - } - public String getResFile() { return resFile; } - - public void setResFile(String resFile) { - this.resFile = resFile; - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java new file mode 100644 index 0000000000..50b41800a1 --- /dev/null +++ 
ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; + +import java.io.IOException; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; + +/** + * Operation process of showing a grant. 
+ */ +public class ShowGrantOperation extends DDLOperation { + private final ShowGrantDesc desc; + + public ShowGrantOperation(DDLOperationContext context, ShowGrantDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + try { + List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges( + RoleUtils.getAuthorizationTranslator(authorizer).getHivePrincipal(desc.getPrincipalDesc()), + RoleUtils.getAuthorizationTranslator(authorizer).getHivePrivilegeObject(desc.getHiveObj())); + boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); + DDLUtils.writeToFile(writeGrantInfo(privInfos, testMode), desc.getResFile(), context); + } catch (IOException e) { + throw new HiveException("Error in show grant statement", e); + } + return 0; + } + + private String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) { + if (CollectionUtils.isEmpty(privileges)) { + return ""; + } + + // sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() { + @Override + public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) { + int compare = o1.getObject().compareTo(o2.getObject()); + if (compare == 0) { + compare = o1.getPrincipal().compareTo(o2.getPrincipal()); + } + if (compare == 0) { + compare = o1.getPrivilege().compareTo(o2.getPrivilege()); + } + return compare; + } + }); + + StringBuilder builder = new StringBuilder(); + for (HivePrivilegeInfo privilege : privileges) { + HivePrincipal principal = privilege.getPrincipal(); + HivePrivilegeObject resource = privilege.getObject(); + HivePrincipal grantor = privilege.getGrantorPrincipal(); + + DDLUtils.appendNonNull(builder, resource.getDbname(), true); + DDLUtils.appendNonNull(builder, resource.getObjectName()); + DDLUtils.appendNonNull(builder, resource.getPartKeys()); + DDLUtils.appendNonNull(builder, resource.getColumns()); + 
DDLUtils.appendNonNull(builder, principal.getName()); + DDLUtils.appendNonNull(builder, principal.getType()); + DDLUtils.appendNonNull(builder, privilege.getPrivilege().getName()); + DDLUtils.appendNonNull(builder, privilege.isGrantOption()); + DDLUtils.appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L); + DDLUtils.appendNonNull(builder, grantor.getName()); + } + return builder.toString(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java new file mode 100644 index 0000000000..1844cf2162 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SHOW PRINCIPALS commands. 
+ */ +@Explain(displayName = "Show Principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowPrincipalsDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String SCHEMA = + "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" + + "string:string:boolean:string:string:bigint"; + + static { + DDLTask2.registerOperation(ShowPrincipalsDesc.class, ShowPrincipalsOperation.class); + } + + private final String name; + private final String resFile; + + public ShowPrincipalsDesc(String name, String resFile) { + this.name = name; + this.resFile = resFile; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getName() { + return name; + } + + public String getResFile() { + return resFile; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java new file mode 100644 index 0000000000..392142ba14 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; + +/** + * Operation process of showing the principals. + */ +public class ShowPrincipalsOperation extends DDLOperation { + private final ShowPrincipalsDesc desc; + + public ShowPrincipalsOperation(DDLOperationContext context, ShowPrincipalsDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException, IOException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); + List<HiveRoleGrant> roleGrants = authorizer.getPrincipalGrantInfoForRole(desc.getName()); + DDLUtils.writeToFile(writeHiveRoleGrantInfo(roleGrants, testMode), desc.getResFile(), context); + + return 0; + } + + private String writeHiveRoleGrantInfo(List<HiveRoleGrant> roleGrants, boolean testMode) { + if (roleGrants == null || roleGrants.isEmpty()) { + return ""; + } + StringBuilder builder = new StringBuilder(); + // sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(roleGrants); + for (HiveRoleGrant roleGrant : roleGrants) { + // schema: principal_name,principal_type,grant_option,grantor,grantor_type,grant_time + DDLUtils.appendNonNull(builder, roleGrant.getPrincipalName(), true); + DDLUtils.appendNonNull(builder, roleGrant.getPrincipalType()); + 
DDLUtils.appendNonNull(builder, roleGrant.isGrantOption()); + DDLUtils.appendNonNull(builder, roleGrant.getGrantor()); + DDLUtils.appendNonNull(builder, roleGrant.getGrantorType()); + DDLUtils.appendNonNull(builder, testMode ? -1 : roleGrant.getGrantTime() * 1000L); + } + return builder.toString(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java new file mode 100644 index 0000000000..3b713a125a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SHOW ROLE GRANT commands. 
+ */ +@Explain(displayName = "Show Role Grant", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowRoleGrantDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String SCHEMA = + "role,grant_option,grant_time,grantor#" + + "string:boolean:bigint:string"; + + static { + DDLTask2.registerOperation(ShowRoleGrantDesc.class, ShowRoleGrantOperation.class); + } + + private final String name; + private final PrincipalType principalType; + private final String resFile; + + public ShowRoleGrantDesc(String name, PrincipalType principalType, String resFile) { + this.name = name; + this.principalType = principalType; + this.resFile = resFile; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getName() { + return name; + } + + public PrincipalType getPrincipalType() { + return principalType; + } + + public String getResFile() { + return resFile; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java new file mode 100644 index 0000000000..178ea8e3bc --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.DDLUtils; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; + +/** + * Operation process of showing the role grants. 
+ */ +public class ShowRoleGrantOperation extends DDLOperation { + private final ShowRoleGrantDesc desc; + + public ShowRoleGrantOperation(DDLOperationContext context, ShowRoleGrantDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException, IOException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + boolean testMode = context.getConf().getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); + List roles = authorizer.getRoleGrantInfoForPrincipal( + AuthorizationUtils.getHivePrincipal(desc.getName(), desc.getPrincipalType())); + DDLUtils.writeToFile(writeRolesGrantedInfo(roles, testMode), desc.getResFile(), context); + + return 0; + } + + private String writeRolesGrantedInfo(List roles, boolean testMode) { + if (roles == null || roles.isEmpty()) { + return ""; + } + StringBuilder builder = new StringBuilder(); + //sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(roles); + for (HiveRoleGrant role : roles) { + DDLUtils.appendNonNull(builder, role.getRoleName(), true); + DDLUtils.appendNonNull(builder, role.isGrantOption()); + DDLUtils.appendNonNull(builder, testMode ? -1 : role.getGrantTime() * 1000L); + DDLUtils.appendNonNull(builder, role.getGrantor()); + } + return builder.toString(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java new file mode 100644 index 0000000000..36a0b1aa14 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SHOW ROLES commands. + */ +@Explain(displayName = "Show Roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowRolesDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String SCHEMA = "role#string"; + + static { + DDLTask2.registerOperation(ShowRolesDesc.class, ShowRolesOperation.class); + } + + private final String resFile; + + public ShowRolesDesc(String resFile) { + this.resFile = resFile; + } + + public String getResFile() { + return resFile; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java new file mode 100644 index 0000000000..22ca7f350d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.privilege; + +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; + +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.hive.ql.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; + +/** + * Operation process of showing the roles. 
+ */ +public class ShowRolesOperation extends DDLOperation { + private final ShowRolesDesc desc; + + public ShowRolesOperation(DDLOperationContext context, ShowRolesDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException, IOException { + HiveAuthorizer authorizer = RoleUtils.getSessionAuthorizer(context.getConf()); + List allRoles = authorizer.getAllRoles(); + RoleUtils.writeListToFileAfterSort(allRoles, desc.getResFile(), context); + return 0; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java index 0cfffd2032..bb533c2c85 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/DescTableDesc.java @@ -35,6 +35,14 @@ public class DescTableDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private static final String SCHEMA = "col_name,data_type,comment#string:string:string"; + private static final String COL_STATS_SCHEMA = "col_name,data_type,min,max,num_nulls," + + "distinct_count,avg_col_len,max_col_len,num_trues,num_falses,bitVector,comment" + + "#string:string:string:string:string:string:string:string:string:string:string:string"; + public static String getSchema(boolean colStats) { + return colStats ? COL_STATS_SCHEMA : SCHEMA; + } + static { DDLTask2.registerOperation(DescTableDesc.class, DescTableOperation.class); } @@ -82,19 +90,4 @@ public boolean isExt() { public boolean isFormatted() { return isFormatted; } - - /** - * thrift ddl for the result of describe table. 
- */ - private static final String SCHEMA = "col_name,data_type,comment#string:string:string"; - private static final String COL_STATS_SCHEMA = "col_name,data_type,min,max,num_nulls," - + "distinct_count,avg_col_len,max_col_len,num_trues,num_falses,bitVector,comment" - + "#string:string:string:string:string:string:string:string:string:string:string:string"; - - public static String getSchema(boolean colStats) { - if (colStats) { - return COL_STATS_SCHEMA; - } - return SCHEMA; - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java index 8fa1ef16aa..a06f1fae5f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowCreateTableDesc.java @@ -32,15 +32,12 @@ public class ShowCreateTableDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "createtab_stmt#string"; + static { DDLTask2.registerOperation(ShowCreateTableDesc.class, ShowCreateTableOperation.class); } - /** - * Thrift ddl for the result of showcreatetable. 
- */ - public static final String SCHEMA = "createtab_stmt#string"; - private final String resFile; private final String tableName; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java index 72caa58607..7ba1c2daef 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablePropertiesDesc.java @@ -32,15 +32,12 @@ public class ShowTablePropertiesDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "prpt_name,prpt_value#string:string"; + static { DDLTask2.registerOperation(ShowTablePropertiesDesc.class, ShowTablePropertiesOperation.class); } - /** - * Thrift ddl for the result of showtblproperties. - */ - public static final String SCHEMA = "prpt_name,prpt_value#string:string"; - private final String resFile; private final String tableName; private final String propertyName; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java index 8c312a0c5e..6707350f34 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTableStatusDesc.java @@ -33,15 +33,12 @@ public class ShowTableStatusDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + public static final String SCHEMA = "tab_name#string"; + static { DDLTask2.registerOperation(ShowTableStatusDesc.class, ShowTableStatusOperation.class); } - /** - * Thrift ddl for the result of show tables. 
- */ - public static final String SCHEMA = "tab_name#string"; - private final String resFile; private final String dbName; private final String pattern; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java index 584433b0a0..9ec390483d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/ShowTablesDesc.java @@ -34,25 +34,13 @@ public class ShowTablesDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(ShowTablesDesc.class, ShowTablesOperation.class); - } - - /** - * thrift ddl for the result of show tables and show views. - */ private static final String TABLES_VIEWS_SCHEMA = "tab_name#string"; - - /** - * thrift ddl for the result of show extended tables. - */ private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string"; + private static final String MATERIALIZED_VIEWS_SCHEMA = "mv_name,rewrite_enabled,mode#string:string:string"; - /** - * thrift ddl for the result of show tables. 
- */ - private static final String MATERIALIZED_VIEWS_SCHEMA = - "mv_name,rewrite_enabled,mode#string:string:string"; + static { + DDLTask2.registerOperation(ShowTablesDesc.class, ShowTablesOperation.class); + } private final String resFile; private final String dbName; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 269cd852bf..f88f6b2d61 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -21,7 +21,6 @@ import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; -import java.io.OutputStreamWriter; import java.io.Serializable; import java.net.URI; import java.net.URISyntaxException; @@ -43,7 +42,6 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; @@ -69,7 +67,6 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement; import org.apache.hadoop.hive.metastore.api.ShowLocksRequest; @@ -148,8 +145,6 @@ import org.apache.hadoop.hive.ql.plan.DropWMPoolDesc; import org.apache.hadoop.hive.ql.plan.DropWMTriggerDesc; import org.apache.hadoop.hive.ql.plan.FileMergeDesc; -import org.apache.hadoop.hive.ql.plan.GrantDesc; -import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL; import org.apache.hadoop.hive.ql.plan.InsertCommitHookDesc; import org.apache.hadoop.hive.ql.plan.KillQueryDesc; import 
org.apache.hadoop.hive.ql.plan.ListBucketingCtx; @@ -158,18 +153,12 @@ import org.apache.hadoop.hive.ql.plan.MsckDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.OrcFileMergeDesc; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.plan.RCFileMergeDesc; import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc; import org.apache.hadoop.hive.ql.plan.ReplRemoveFirstIncLoadPendFlagDesc; -import org.apache.hadoop.hive.ql.plan.RevokeDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc; import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc; import org.apache.hadoop.hive.ql.plan.ShowConfDesc; -import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc; @@ -177,17 +166,6 @@ import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; -import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; -import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo; -import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; @@ -229,7 +207,6 @@ private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX; private MetaDataFormatter formatter; - private final HiveAuthorizationTranslator defaultAuthorizationTranslator = new DefaultHiveAuthorizationTranslator(); @Override public boolean requireLock() { @@ -353,35 +330,6 @@ public int execute(DriverContext driverContext) { return showConf(db, showConf); } - RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc(); - if (roleDDLDesc != null) { - return roleDDL(db, roleDDLDesc); - } - - GrantDesc grantDesc = work.getGrantDesc(); - if (grantDesc != null) { - return grantOrRevokePrivileges(db, grantDesc.getPrincipals(), grantDesc - .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(), - grantDesc.getGrantorType(), grantDesc.isGrantOption(), true); - } - - RevokeDesc revokeDesc = work.getRevokeDesc(); - if (revokeDesc != null) { - return grantOrRevokePrivileges(db, revokeDesc.getPrincipals(), revokeDesc - .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null, - revokeDesc.isGrantOption(), false); - } - - ShowGrantDesc showGrantDesc = work.getShowGrantDesc(); - if (showGrantDesc != null) { - return showGrants(db, showGrantDesc); - } - - GrantRevokeRoleDDL grantOrRevokeRoleDDL = work.getGrantRevokeRoleDDL(); - if (grantOrRevokeRoleDDL != null) { - return grantOrRevokeRole(db, grantOrRevokeRoleDDL); - } - AlterTablePartMergeFilesDesc mergeFilesDesc = work.getMergeFilesDesc(); if (mergeFilesDesc != null) { return mergeFiles(db, mergeFilesDesc, driverContext); @@ -803,165 +751,6 @@ private int mergeFiles(Hive db, 
AlterTablePartMergeFilesDesc mergeFilesDesc, return ret; } - private HiveAuthorizer getSessionAuthorizer(Hive db) { - HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); - if (authorizer == null) { - authorizer = new HiveV1Authorizer(conf, db); - } - return authorizer; - } - - private int grantOrRevokeRole(Hive db, GrantRevokeRoleDDL grantOrRevokeRoleDDL) - throws HiveException { - HiveAuthorizer authorizer = getSessionAuthorizer(db); - //convert to the types needed for plugin api - HivePrincipal grantorPrinc = null; - if(grantOrRevokeRoleDDL.getGrantor() != null){ - grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(), - AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType())); - } - List principals = AuthorizationUtils.getHivePrincipals( - grantOrRevokeRoleDDL.getPrincipalDesc(), getAuthorizationTranslator(authorizer)); - List roles = grantOrRevokeRoleDDL.getRoles(); - - boolean grantOption = grantOrRevokeRoleDDL.isGrantOption(); - if (grantOrRevokeRoleDDL.getGrant()) { - authorizer.grantRole(principals, roles, grantOption, grantorPrinc); - } else { - authorizer.revokeRole(principals, roles, grantOption, grantorPrinc); - } - return 0; - } - - private HiveAuthorizationTranslator getAuthorizationTranslator(HiveAuthorizer authorizer) - throws HiveAuthzPluginException { - if (authorizer.getHiveAuthorizationTranslator() == null) { - return defaultAuthorizationTranslator; - } else { - return (HiveAuthorizationTranslator)authorizer.getHiveAuthorizationTranslator(); - } - } - - private int showGrants(Hive db, ShowGrantDesc showGrantDesc) throws HiveException { - - HiveAuthorizer authorizer = getSessionAuthorizer(db); - try { - List privInfos = authorizer.showPrivileges( - getAuthorizationTranslator(authorizer).getHivePrincipal(showGrantDesc.getPrincipalDesc()), - getAuthorizationTranslator(authorizer).getHivePrivilegeObject(showGrantDesc.getHiveObj())); - boolean testMode = 
conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); - writeToFile(writeGrantInfo(privInfos, testMode), showGrantDesc.getResFile()); - } catch (IOException e) { - throw new HiveException("Error in show grant statement", e); - } - return 0; - } - - private int grantOrRevokePrivileges(Hive db, List principals, - List privileges, PrivilegeObjectDesc privSubjectDesc, - String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant) - throws HiveException { - - HiveAuthorizer authorizer = getSessionAuthorizer(db); - - //Convert to object types used by the authorization plugin interface - List hivePrincipals = AuthorizationUtils.getHivePrincipals( - principals, getAuthorizationTranslator(authorizer)); - List hivePrivileges = AuthorizationUtils.getHivePrivileges( - privileges, getAuthorizationTranslator(authorizer)); - HivePrivilegeObject hivePrivObject = getAuthorizationTranslator(authorizer) - .getHivePrivilegeObject(privSubjectDesc); - - HivePrincipal grantorPrincipal = new HivePrincipal( - grantor, AuthorizationUtils.getHivePrincipalType(grantorType)); - - if(isGrant){ - authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject, - grantorPrincipal, grantOption); - }else { - authorizer.revokePrivileges(hivePrincipals, hivePrivileges, - hivePrivObject, grantorPrincipal, grantOption); - } - //no exception thrown, so looks good - return 0; - } - - private int roleDDL(Hive db, RoleDDLDesc roleDDLDesc) throws Exception { - HiveAuthorizer authorizer = getSessionAuthorizer(db); - RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation(); - //call the appropriate hive authorizer function - switch(operation){ - case CREATE_ROLE: - authorizer.createRole(roleDDLDesc.getName(), null); - break; - case DROP_ROLE: - authorizer.dropRole(roleDDLDesc.getName()); - break; - case SHOW_ROLE_GRANT: - boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); - List roles = authorizer.getRoleGrantInfoForPrincipal( - 
AuthorizationUtils.getHivePrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType())); - writeToFile(writeRolesGrantedInfo(roles, testMode), roleDDLDesc.getResFile()); - break; - case SHOW_ROLES: - List allRoles = authorizer.getAllRoles(); - writeListToFileAfterSort(allRoles, roleDDLDesc.getResFile()); - break; - case SHOW_CURRENT_ROLE: - List roleNames = authorizer.getCurrentRoleNames(); - writeListToFileAfterSort(roleNames, roleDDLDesc.getResFile()); - break; - case SET_ROLE: - authorizer.setCurrentRole(roleDDLDesc.getName()); - break; - case SHOW_ROLE_PRINCIPALS: - testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST); - List roleGrants = authorizer.getPrincipalGrantInfoForRole(roleDDLDesc.getName()); - writeToFile(writeHiveRoleGrantInfo(roleGrants, testMode), roleDDLDesc.getResFile()); - break; - default: - throw new HiveException("Unkown role operation " - + operation.getOperationName()); - } - - return 0; - } - - private String writeHiveRoleGrantInfo(List roleGrants, boolean testMode) { - if (roleGrants == null || roleGrants.isEmpty()) { - return ""; - } - StringBuilder builder = new StringBuilder(); - // sort the list to get sorted (deterministic) output (for ease of testing) - Collections.sort(roleGrants); - for (HiveRoleGrant roleGrant : roleGrants) { - // schema: - // principal_name,principal_type,grant_option,grantor,grantor_type,grant_time - appendNonNull(builder, roleGrant.getPrincipalName(), true); - appendNonNull(builder, roleGrant.getPrincipalType()); - appendNonNull(builder, roleGrant.isGrantOption()); - appendNonNull(builder, roleGrant.getGrantor()); - appendNonNull(builder, roleGrant.getGrantorType()); - appendNonNull(builder, testMode ? 
-1 : roleGrant.getGrantTime() * 1000L); - } - return builder.toString(); - } - - /** - * Write list of string entries into given file - * @param entries - * @param resFile - * @throws IOException - */ - private void writeListToFileAfterSort(List entries, String resFile) throws IOException { - Collections.sort(entries); - StringBuilder sb = new StringBuilder(); - for(String entry : entries){ - appendNonNull(sb, entry, true); - } - writeToFile(sb.toString(), resFile); - } - /** * Add a partitions to a table. * @@ -2298,93 +2087,6 @@ private int killQuery(Hive db, KillQueryDesc desc) throws HiveException { return 0; } - private void writeToFile(String data, String file) throws IOException { - Path resFile = new Path(file); - FileSystem fs = resFile.getFileSystem(conf); - FSDataOutputStream out = fs.create(resFile); - try { - if (data != null && !data.isEmpty()) { - OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8"); - writer.write(data); - writer.write((char) terminator); - writer.flush(); - } - } finally { - IOUtils.closeStream(out); - } - } - - private String writeGrantInfo(List privileges, boolean testMode) { - if (privileges == null || privileges.isEmpty()) { - return ""; - } - StringBuilder builder = new StringBuilder(); - //sort the list to get sorted (deterministic) output (for ease of testing) - Collections.sort(privileges, new Comparator() { - @Override - public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) { - int compare = o1.getObject().compareTo(o2.getObject()); - if (compare == 0) { - compare = o1.getPrincipal().compareTo(o2.getPrincipal()); - } - if (compare == 0) { - compare = o1.getPrivilege().compareTo(o2.getPrivilege()); - } - return compare; - } - }); - - for (HivePrivilegeInfo privilege : privileges) { - HivePrincipal principal = privilege.getPrincipal(); - HivePrivilegeObject resource = privilege.getObject(); - HivePrincipal grantor = privilege.getGrantorPrincipal(); - - appendNonNull(builder, resource.getDbname(), true); 
- appendNonNull(builder, resource.getObjectName()); - appendNonNull(builder, resource.getPartKeys()); - appendNonNull(builder, resource.getColumns()); - appendNonNull(builder, principal.getName()); - appendNonNull(builder, principal.getType()); - appendNonNull(builder, privilege.getPrivilege().getName()); - appendNonNull(builder, privilege.isGrantOption()); - appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L); - appendNonNull(builder, grantor.getName()); - } - return builder.toString(); - } - - private String writeRolesGrantedInfo(List roles, boolean testMode) { - if (roles == null || roles.isEmpty()) { - return ""; - } - StringBuilder builder = new StringBuilder(); - //sort the list to get sorted (deterministic) output (for ease of testing) - Collections.sort(roles); - for (HiveRoleGrant role : roles) { - appendNonNull(builder, role.getRoleName(), true); - appendNonNull(builder, role.isGrantOption()); - appendNonNull(builder, testMode ? -1 : role.getGrantTime() * 1000L); - appendNonNull(builder, role.getGrantor()); - } - return builder.toString(); - } - - private StringBuilder appendNonNull(StringBuilder builder, Object value) { - return appendNonNull(builder, value, false); - } - - private StringBuilder appendNonNull(StringBuilder builder, Object value, boolean firstColumn) { - if (!firstColumn) { - builder.append((char)separator); - } else if (builder.length() > 0) { - builder.append((char)terminator); - } - if (value != null) { - builder.append(value); - } - return builder; - } - /** * Alter a given table. 
* diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index c892b40224..343789584b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.DatabaseEvent; @@ -31,7 +32,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.ReplLoadOpType; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index d187d197a0..df7f3e94bf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -83,6 +83,11 @@ import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc; import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowPrincipalsDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc; +import 
org.apache.hadoop.hive.ql.ddl.privilege.ShowRolesDesc; import org.apache.hadoop.hive.ql.ddl.table.DescTableDesc; import org.apache.hadoop.hive.ql.ddl.table.DropTableDesc; import org.apache.hadoop.hive.ql.ddl.table.LockTableDesc; @@ -157,13 +162,10 @@ import org.apache.hadoop.hive.ql.plan.MoveWork; import org.apache.hadoop.hive.ql.plan.MsckDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import org.apache.hadoop.hive.ql.plan.RenamePartitionDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc; import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc; import org.apache.hadoop.hive.ql.plan.ShowConfDesc; -import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc; @@ -670,7 +672,7 @@ private void analyzeSetShowRole(ASTNode ast) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask( getInputs(), getOutputs(), ctx.getResFile())); - setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema())); + setFetchTask(createFetchTask(ShowRolesDesc.SCHEMA)); break; case 1: rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask( @@ -700,7 +702,7 @@ private void analyzeShowGrant(ASTNode ast) throws SemanticException { createShowGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs()); if(task != null) { rootTasks.add(task); - setFetchTask(createFetchTask(ShowGrantDesc.getSchema())); + setFetchTask(createFetchTask(ShowGrantDesc.SCHEMA)); } } @@ -741,17 +743,17 @@ private void analyzeShowRoleGrant(ASTNode ast) throws SemanticException { createShowRoleGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs()); if(task != null) { rootTasks.add(task); - setFetchTask(createFetchTask(RoleDDLDesc.getRoleShowGrantSchema())); 
+ setFetchTask(createFetchTask(ShowRoleGrantDesc.SCHEMA)); } } private void analyzeShowRolePrincipals(ASTNode ast) throws SemanticException { - Task roleDDLTask = (Task) hiveAuthorizationTaskFactory + Task roleDDLTask = (Task) hiveAuthorizationTaskFactory .createShowRolePrincipalsTask(ast, ctx.getResFile(), getInputs(), getOutputs()); if (roleDDLTask != null) { rootTasks.add(roleDDLTask); - setFetchTask(createFetchTask(RoleDDLDesc.getShowRolePrincipalsSchema())); + setFetchTask(createFetchTask(ShowPrincipalsDesc.SCHEMA)); } } @@ -762,7 +764,7 @@ private void analyzeShowRoles(ASTNode ast) throws SemanticException { if (roleDDLTask != null) { rootTasks.add(roleDDLTask); - setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema())); + setFetchTask(createFetchTask(ShowRolesDesc.SCHEMA)); } } @@ -1605,7 +1607,6 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException { LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, partSpec == null ? new HashMap<>() : partSpec); ltd.setLbCtx(lbCtx); - @SuppressWarnings("unchecked") Task moveTsk = TaskFactory.get(new MoveWork(null, null, ltd, null, false)); truncateTask.addDependentTask(moveTsk); @@ -2801,7 +2802,7 @@ private void analyzeShowFunctions(ASTNode ast) throws SemanticException { showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile()); } rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showFuncsDesc))); - setFetchTask(createFetchTask(ShowFunctionsDesc.getSchema())); + setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA)); } /** @@ -3154,7 +3155,7 @@ private void analyzeDescFunction(ASTNode ast) throws SemanticException { DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended); rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descFuncDesc))); - setFetchTask(createFetchTask(DescFunctionDesc.getSchema())); + setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA)); } @@ -4111,7 +4112,7 @@ private void 
analyzeAltertableSkewedby(String[] qualified, ASTNode ast) throws S * Throw an error if the user tries to use the DDL with * hive.internal.ddl.list.bucketing.enable set to false. */ - HiveConf hiveConf = SessionState.get().getConf(); + SessionState.get().getConf(); Table tab = getTable(qualified); @@ -4255,7 +4256,7 @@ private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName, * Throw an error if the user tries to use the DDL with * hive.internal.ddl.list.bucketing.enable set to false. */ - HiveConf hiveConf = SessionState.get().getConf(); + SessionState.get().getConf(); /** * Retrieve mappings from parser */ diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java index de5c90769a..c8f1246e86 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationParseUtils.java @@ -21,10 +21,10 @@ import java.util.List; import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; /** * Utility functions for creating objects relevant for authorization operations diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 18ed6fb418..be017dabc1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -19,6 +19,7 @@ import java.io.Serializable; import 
java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -27,6 +28,22 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.privilege.CreateRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.DropRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.GrantRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.RevokeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.RevokeRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.SetRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowCurrentRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowPrincipalsDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowRolesDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; @@ -40,16 +57,6 @@ import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.GrantDesc; -import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; -import 
org.apache.hadoop.hive.ql.plan.RevokeDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc.RoleOperation; -import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry; import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType; @@ -70,15 +77,15 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { public Task createCreateRoleTask(ASTNode ast, HashSet inputs, HashSet outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); - RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null); - return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc)); + CreateRoleDesc createRoleDesc = new CreateRoleDesc(roleName); + return TaskFactory.get(new DDLWork2(inputs, outputs, createRoleDesc)); } @Override public Task createDropRoleTask(ASTNode ast, HashSet inputs, HashSet outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); - RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null); - return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc)); + DropRoleDesc dropRoleDesc = new DropRoleDesc(roleName); + return TaskFactory.get(new DDLWork2(inputs, outputs, dropRoleDesc)); } @Override public Task createShowRoleGrantTask(ASTNode ast, Path resultFile, @@ -97,10 +104,8 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { break; } String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText()); - RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType, - RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null); - roleDesc.setResFile(resultFile.toString()); - return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc)); + 
ShowRoleGrantDesc showRoleGrantDesc = new ShowRoleGrantDesc(principalName, principalType, resultFile.toString()); + return TaskFactory.get(new DDLWork2(inputs, outputs, showRoleGrantDesc)); } @Override public Task createGrantTask(ASTNode ast, HashSet inputs, @@ -127,7 +132,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption); - return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc)); + return TaskFactory.get(new DDLWork2(inputs, outputs, grantDesc)); } @Override @@ -146,12 +151,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } } RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption); - return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc)); - } - @Override - public Task createGrantRoleTask(ASTNode ast, HashSet inputs, - HashSet outputs) { - return analyzeGrantRevokeRole(true, ast, inputs, outputs); + return TaskFactory.get(new DDLWork2(inputs, outputs, revokeDesc)); } @Override public Task createShowGrantTask(ASTNode ast, Path resultFile, HashSet inputs, @@ -171,17 +171,20 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { if (param != null) { if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { - privHiveObj = new PrivilegeObjectDesc(); + privHiveObj = new PrivilegeObjectDesc(true, null, null, null); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = parsePrivObject(param); } } - ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), - principalDesc, privHiveObj); - return TaskFactory.get(new DDLWork(inputs, outputs, showGrant)); + ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); + return TaskFactory.get(new DDLWork2(inputs, outputs, showGrant)); + } + @Override + public Task createGrantRoleTask(ASTNode ast, HashSet inputs, + HashSet outputs) { + 
return analyzeGrantRevokeRole(true, ast, inputs, outputs); } - @Override public Task createRevokeRoleTask(ASTNode ast, HashSet inputs, HashSet outputs) { @@ -211,10 +214,13 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { //until change is made to use the admin option. Default to false with V2 authorization - - GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, - roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin); - return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL)); + if (isGrant) { + GrantRoleDesc grantRoleDesc = new GrantRoleDesc(roles, principalDesc, roleOwnerName, isAdmin); + return TaskFactory.get(new DDLWork2(inputs, outputs, grantRoleDesc)); + } else { + RevokeRoleDesc revokeRoleDesc = new RevokeRoleDesc(roles, principalDesc, roleOwnerName, isAdmin); + return TaskFactory.get(new DDLWork2(inputs, outputs, revokeRoleDesc)); + } } private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast, @@ -237,29 +243,34 @@ private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast, } protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException { - PrivilegeObjectDesc subject = new PrivilegeObjectDesc(); + boolean isTable; + String object = null; + HashMap partSpec = null; + List columns = null; + + ASTNode child = (ASTNode) ast.getChild(0); ASTNode gchild = (ASTNode)child.getChild(0); if (child.getType() == HiveParser.TOK_TABLE_TYPE) { - subject.setTable(true); + isTable = true; String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild); - subject.setObject(BaseSemanticAnalyzer.getDotName(qualified)); + object = BaseSemanticAnalyzer.getDotName(qualified); } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) { throw new SemanticException("Hive authorization does not support the URI or SERVER objects"); } else { - subject.setTable(false); - 
subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText())); + isTable = false; + object = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()); } //if partition spec node is present, set partition spec for (int i = 1; i < child.getChildCount(); i++) { gchild = (ASTNode) child.getChild(i); if (gchild.getType() == HiveParser.TOK_PARTSPEC) { - subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild)); + partSpec = DDLSemanticAnalyzer.getPartSpec(gchild); } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) { - subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild)); + columns = BaseSemanticAnalyzer.getColumnNames(gchild); } } - return subject; + return new PrivilegeObjectDesc(isTable, object, partSpec, columns); } private List analyzePrivilegeListDef(ASTNode node) @@ -329,17 +340,16 @@ private String toMessage(ErrorMsg message, Object detail) { public Task createSetRoleTask(String roleName, HashSet inputs, HashSet outputs) throws SemanticException { - return TaskFactory.get(new DDLWork(inputs, outputs, new RoleDDLDesc(roleName, PrincipalType.ROLE, - RoleDDLDesc.RoleOperation.SET_ROLE, null))); + SetRoleDesc setRoleDesc = new SetRoleDesc(roleName); + return TaskFactory.get(new DDLWork2(inputs, outputs, setRoleDesc)); } @Override public Task createShowCurrentRoleTask( HashSet inputs, HashSet outputs, Path resFile) throws SemanticException { - RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE); - ddlDesc.setResFile(resFile.toString()); - return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc)); + ShowCurrentRoleDesc showCurrentRoleDesc = new ShowCurrentRoleDesc(resFile.toString()); + return TaskFactory.get(new DDLWork2(inputs, outputs, showCurrentRoleDesc)); } @Override @@ -354,19 +364,15 @@ private String toMessage(ErrorMsg message, Object detail) { throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS"); } - RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, 
PrincipalType.ROLE, - RoleOperation.SHOW_ROLE_PRINCIPALS, null); - roleDDLDesc.setResFile(resFile.toString()); - return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc)); + ShowPrincipalsDesc showPrincipalsDesc = new ShowPrincipalsDesc(roleName, resFile.toString()); + return TaskFactory.get(new DDLWork2(inputs, outputs, showPrincipalsDesc)); } @Override public Task createShowRolesTask(ASTNode ast, Path resFile, HashSet inputs, HashSet outputs) throws SemanticException { - RoleDDLDesc showRolesDesc = new RoleDDLDesc(null, null, RoleDDLDesc.RoleOperation.SHOW_ROLES, - null); - showRolesDesc.setResFile(resFile.toString()); - return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc)); + ShowRolesDesc showRolesDesc = new ShowRolesDesc(resFile.toString()); + return TaskFactory.get(new DDLWork2(inputs, outputs, showRolesDesc)); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index 7162375cdf..edeaaa26e4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -22,13 +22,13 @@ import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.dump.Utils; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import java.io.Serializable; import java.util.Collections; diff --git 
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index 41b6db6e9d..b81aa2db4d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -26,12 +26,12 @@ import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.EximUtil; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.load.MetaData; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import java.io.IOException; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java index d70353e358..8603521041 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ParseUtils; diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index c3863e19a7..e6f3a6f917 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -50,12 +50,6 @@ private AlterTableExchangePartition alterTableExchangePartition; private KillQueryDesc killQueryDesc; - private RoleDDLDesc roleDDLDesc; - private GrantDesc grantDesc; - private ShowGrantDesc showGrantDesc; - private RevokeDesc revokeDesc; - private GrantRevokeRoleDDL grantRevokeRoleDDL; - private ShowConfDesc showConfDesc; private CreateResourcePlanDesc createResourcePlanDesc; @@ -210,36 +204,6 @@ public DDLWork(HashSet inputs, HashSet outputs, msckDesc = checkDesc; } - public DDLWork(HashSet inputs, HashSet outputs, - RoleDDLDesc roleDDLDesc) { - this(inputs, outputs); - this.roleDDLDesc = roleDDLDesc; - } - - public DDLWork(HashSet inputs, HashSet outputs, - GrantDesc grantDesc) { - this(inputs, outputs); - this.grantDesc = grantDesc; - } - - public DDLWork(HashSet inputs, HashSet outputs, - ShowGrantDesc showGrant) { - this(inputs, outputs); - this.showGrantDesc = showGrant; - } - - public DDLWork(HashSet inputs, HashSet outputs, - RevokeDesc revokeDesc) { - this(inputs, outputs); - this.revokeDesc = revokeDesc; - } - - public DDLWork(HashSet inputs, HashSet outputs, - GrantRevokeRoleDDL grantRevokeRoleDDL) { - this(inputs, outputs); - this.grantRevokeRoleDDL = grantRevokeRoleDDL; - } - public DDLWork(HashSet inputs, HashSet outputs, AlterTablePartMergeFilesDesc mergeDesc) { this(inputs, outputs); @@ -452,35 +416,6 @@ public MsckDesc getMsckDesc() { return outputs; } - /** - * @return role ddl desc - */ - public RoleDDLDesc getRoleDDLDesc() { - return roleDDLDesc; - } - - /** - * @return grant desc - */ - public GrantDesc getGrantDesc() { - return grantDesc; - } - - /** - * @return show grant desc - */ - public ShowGrantDesc getShowGrantDesc() { - return showGrantDesc; - } - - public RevokeDesc getRevokeDesc() { - return revokeDesc; - } - - public GrantRevokeRoleDDL getGrantRevokeRoleDDL() { - return grantRevokeRoleDDL; - } - /** * @return descriptor for merging files */ diff --git 
ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java deleted file mode 100644 index 07529d9627..0000000000 --- ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.plan; - -import java.util.List; - -import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -@Explain(displayName="grant or revoke roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class GrantRevokeRoleDDL { - - private boolean grant; - - private List principalDesc; - - private List roles; - - private String grantor; - - private PrincipalType grantorType; - - private boolean grantOption; - - public GrantRevokeRoleDDL() { - } - - public GrantRevokeRoleDDL(boolean grant, List roles, - List principalDesc, String grantor, - PrincipalType grantorType, boolean grantOption) { - super(); - this.grant = grant; - this.principalDesc = principalDesc; - this.roles = roles; - this.grantor = grantor; - this.grantorType = grantorType; - this.grantOption = grantOption; - } - - /** - * @return grant or revoke privileges - */ - @Explain(displayName="grant (or revoke)", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public boolean getGrant() { - return grant; - } - - public void setGrant(boolean grant) { - this.grant = grant; - } - - /** - * @return a list of principals - */ - @Explain(displayName="principals", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public List getPrincipalDesc() { - return principalDesc; - } - - public void setPrincipalDesc(List principalDesc) { - this.principalDesc = principalDesc; - } - - /** - * @return a list of roles - */ - @Explain(displayName="roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public List getRoles() { - return roles; - } - - public void setRoles(List roles) { - this.roles = roles; - } - - public String getGrantor() { - return grantor; - } - - public void setGrantor(String grantor) { - this.grantor = grantor; - } - - public PrincipalType getGrantorType() { - return grantorType; - } - - public void setGrantorType(PrincipalType grantorType) { - 
this.grantorType = grantorType; - } - - public boolean isGrantOption() { - return grantOption; - } - - public void setGrantOption(boolean grantOption) { - this.grantOption = grantOption; - } - -} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java deleted file mode 100644 index afe7faf7fc..0000000000 --- ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -@Explain(displayName = "Create Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class RoleDDLDesc extends DDLDesc implements Serializable { - - private static final long serialVersionUID = 1L; - - private String name; - - private PrincipalType principalType; - - private boolean group; - - private RoleOperation operation; - - private String resFile; - - private String roleOwnerName; - - /** - * thrift ddl for the result of show roles. 
- */ - private static final String roleNameSchema = "role#string"; - - /** - * thrift ddl for the result of show role grant principalName - */ - private static final String roleShowGrantSchema = - "role,grant_option,grant_time,grantor#" + - "string:boolean:bigint:string"; - - /** - * thrift ddl for the result of describe role roleName - */ - private static final String roleShowRolePrincipals = - "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" + - "string:string:boolean:string:string:bigint"; - - public static String getRoleNameSchema() { - return roleNameSchema; - } - - public static String getRoleShowGrantSchema() { - return roleShowGrantSchema; - } - - public static String getShowRolePrincipalsSchema() { - return roleShowRolePrincipals; - } - - public static enum RoleOperation { - DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"), - SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role"), - SHOW_ROLE_PRINCIPALS("show_role_principals"); - private String operationName; - - private RoleOperation() { - } - - private RoleOperation(String operationName) { - this.operationName = operationName; - } - - public String getOperationName() { - return operationName; - } - - @Override - public String toString () { - return this.operationName; - } - } - - public RoleDDLDesc(){ - } - - public RoleDDLDesc(String roleName, RoleOperation operation) { - this(roleName, PrincipalType.USER, operation, null); - } - - public RoleDDLDesc(String principalName, PrincipalType principalType, - RoleOperation operation, String roleOwnerName) { - this.name = principalName; - this.principalType = principalType; - this.operation = operation; - this.roleOwnerName = roleOwnerName; - } - - @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getName() { - return name; - } - - @Explain(displayName = "role operation", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED }) - public RoleOperation getOperation() { - return operation; - } - - public void setOperation(RoleOperation operation) { - this.operation = operation; - } - - public PrincipalType getPrincipalType() { - return principalType; - } - - public void setPrincipalType(PrincipalType principalType) { - this.principalType = principalType; - } - - public boolean getGroup() { - return group; - } - - public void setGroup(boolean group) { - this.group = group; - } - - public String getResFile() { - return resFile; - } - - public void setResFile(String resFile) { - this.resFile = resFile; - } - - public String getRoleOwnerName() { - return roleOwnerName; - } - - public void setRoleOwnerName(String roleOwnerName) { - this.roleOwnerName = roleOwnerName; - } - -} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java index f690422bfe..fbf8189498 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java @@ -27,15 +27,15 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.hooks.Entity; import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import 
org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java index 853dcf8a81..537b9de5db 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java @@ -20,11 +20,11 @@ import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java index 29ce9ed299..a0e5d66bfc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java @@ -19,10 +19,10 @@ import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; /** * This interface has functions that provide the ability to customize the translation diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java index fed0d0116e..68f73800b4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java @@ -21,14 +21,13 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ParseUtils; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.session.SessionState; - import org.junit.Assert; /** @@ -36,31 +35,15 @@ */ public class AuthorizationTestUtil { - /** - * Create DDLWork from given ast - * @param ast - * @param conf - * @param db - * @return - * @throws Exception - */ - public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws 
Exception { + public static DDLWork2 analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception { DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db); SessionState.start(queryState.getConf()); analyzer.analyze(ast, new Context(queryState.getConf())); List> rootTasks = analyzer.getRootTasks(); - return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork(); + return (DDLWork2) inList(rootTasks).ofSize(1).get(0).getWork(); } - /** - * Create DDLWork from given command string - * @param command - * @param conf - * @param db - * @return - * @throws Exception - */ - public static DDLWork analyze(String command, QueryState queryState, Hive db) throws Exception { + public static DDLWork2 analyze(String command, QueryState queryState, Hive db) throws Exception { return analyze(parse(command), queryState, db); } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java index a15fd5d0c0..308c8cff1a 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java @@ -17,17 +17,15 @@ */ package org.apache.hadoop.hive.ql.parse.authorization; -import org.junit.Assert; - -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.GrantDesc; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; import 
org.apache.hadoop.hive.ql.security.authorization.PrivilegeType; +import org.junit.Assert; public class PrivilegesTestBase { protected static final String DB = "default"; @@ -37,8 +35,8 @@ public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db) throws Exception { - DDLWork work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db); - GrantDesc grantDesc = work.getGrantDesc(); + DDLWork2 work = AuthorizationTestUtil.analyze("GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db); + GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); //check privileges @@ -51,8 +49,8 @@ public static void grantUserTable(String privStr, PrivilegeType privType, QueryS Assert.assertEquals(PrincipalType.USER, principal.getType()); Assert.assertEquals(USER, principal.getName()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java index 9a8c032623..e7a1bd6156 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java @@ -24,26 +24,25 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.ql.Context; import 
org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.privilege.CreateRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.DropRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.GrantRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.RevokeDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.RevokeRoleDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowGrantDesc; +import org.apache.hadoop.hive.ql.ddl.privilege.ShowRoleGrantDesc; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; -import org.apache.hadoop.hive.ql.parse.ParseDriver; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.GrantDesc; -import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL; -import org.apache.hadoop.hive.ql.plan.PrincipalDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; -import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; -import org.apache.hadoop.hive.ql.plan.RevokeDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; -import org.apache.hadoop.hive.ql.plan.RoleDDLDesc.RoleOperation; -import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.session.SessionState; @@ -88,10 +87,7 @@ public static void reset() { private 
static final String ROLE = "role1"; private static final String USER = "user1"; - private ParseDriver parseDriver; - private DDLSemanticAnalyzer analyzer; private QueryState queryState; - private Context context; private String currentUser; private Hive db; private Table table; @@ -110,9 +106,6 @@ public void setup() throws Exception { table = new Table(DB, TABLE); partition = new Partition(table); SessionState.start(conf); - context = new Context(conf); - parseDriver = new ParseDriver(); - analyzer = new DDLSemanticAnalyzer(queryState, db); Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table); Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table); Mockito.when(db.getPartition(table, new HashMap(), false)) @@ -128,11 +121,9 @@ public void setup() throws Exception { */ @Test public void testCreateRole() throws Exception { - DDLWork work = analyze("CREATE ROLE " + ROLE); - RoleDDLDesc roleDesc = work.getRoleDDLDesc(); + DDLWork2 work = analyze("CREATE ROLE " + ROLE); + CreateRoleDesc roleDesc = (CreateRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); - Assert.assertEquals(RoleOperation.CREATE_ROLE, roleDesc.getOperation()); - Assert.assertFalse("Did not expect a group", roleDesc.getGroup()); Assert.assertEquals(ROLE, roleDesc.getName()); } /** @@ -140,11 +131,9 @@ public void testCreateRole() throws Exception { */ @Test public void testDropRole() throws Exception { - DDLWork work = analyze("DROp ROLE " + ROLE); - RoleDDLDesc roleDesc = work.getRoleDDLDesc(); + DDLWork2 work = analyze("DROp ROLE " + ROLE); + DropRoleDesc roleDesc = (DropRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); - Assert.assertEquals(RoleOperation.DROP_ROLE, roleDesc.getOperation()); - Assert.assertFalse("Did not expect a group", roleDesc.getGroup()); Assert.assertEquals(ROLE, roleDesc.getName()); } /** @@ -152,8 +141,8 @@ public void testDropRole() throws Exception { */ @Test public void 
testGrantUserTable() throws Exception { - DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER); - GrantDesc grantDesc = work.getGrantDesc(); + DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER); + GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.USER, principal.getType()); @@ -162,16 +151,16 @@ public void testGrantUserTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * GRANT ... ON TABLE ... TO ROLE ... 
*/ @Test public void testGrantRoleTable() throws Exception { - DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE); - GrantDesc grantDesc = work.getGrantDesc(); + DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE); + GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.ROLE, principal.getType()); @@ -180,16 +169,16 @@ public void testGrantRoleTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * GRANT ... ON TABLE ... TO GROUP ... 
*/ @Test public void testGrantGroupTable() throws Exception { - DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP); - GrantDesc grantDesc = work.getGrantDesc(); + DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP); + GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.GROUP, principal.getType()); @@ -198,16 +187,16 @@ public void testGrantGroupTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * REVOKE ... ON TABLE ... FROM USER ... 
*/ @Test public void testRevokeUserTable() throws Exception { - DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER); - RevokeDesc grantDesc = work.getRevokeDesc(); + DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER); + RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.USER, principal.getType()); @@ -216,16 +205,16 @@ public void testRevokeUserTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * REVOKE ... ON TABLE ... FROM ROLE ... 
*/ @Test public void testRevokeRoleTable() throws Exception { - DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM ROLE " + ROLE); - RevokeDesc grantDesc = work.getRevokeDesc(); + DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM ROLE " + ROLE); + RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.ROLE, principal.getType()); @@ -234,16 +223,16 @@ public void testRevokeRoleTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * REVOKE ... ON TABLE ... FROM GROUP ... 
*/ @Test public void testRevokeGroupTable() throws Exception { - DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP); - RevokeDesc grantDesc = work.getRevokeDesc(); + DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP); + RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.GROUP, principal.getType()); @@ -252,25 +241,23 @@ public void testRevokeGroupTable() throws Exception { for(PrivilegeDesc privilege : ListSizeMatcher.inList(grantDesc.getPrivileges()).ofSize(1)) { Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege()); } - Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubjectDesc().getTable()); - Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubjectDesc().getObject()); + Assert.assertTrue("Expected table", grantDesc.getPrivilegeSubject().getTable()); + Assert.assertEquals(TABLE_QNAME, grantDesc.getPrivilegeSubject().getObject()); } /** * GRANT ROLE ... TO USER ... 
*/ @Test public void testGrantRoleUser() throws Exception { - DDLWork work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER); - GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER); + GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertTrue("Expected grant ", grantDesc.getGrant()); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.USER, principal.getType()); Assert.assertEquals(USER, principal.getName()); } @@ -280,17 +267,15 @@ public void testGrantRoleUser() throws Exception { */ @Test public void testGrantRoleRole() throws Exception { - DDLWork work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE); - GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE); + GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertTrue("Expected grant ", grantDesc.getGrant()); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : 
ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.ROLE, principal.getType()); Assert.assertEquals(ROLE, principal.getName()); } @@ -300,17 +285,15 @@ public void testGrantRoleRole() throws Exception { */ @Test public void testGrantRoleGroup() throws Exception { - DDLWork work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP); - GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP); + GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertTrue("Expected grant ", grantDesc.getGrant()); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.GROUP, principal.getType()); Assert.assertEquals(GROUP, principal.getName()); } @@ -320,17 +303,15 @@ public void testGrantRoleGroup() throws Exception { */ @Test public void testRevokeRoleUser() throws Exception { - DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER); - GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER); + RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertFalse("Did not expect grant ", grantDesc.getGrant()); Assert.assertFalse("With admin 
option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.USER, principal.getType()); Assert.assertEquals(USER, principal.getName()); } @@ -340,17 +321,15 @@ public void testRevokeRoleUser() throws Exception { */ @Test public void testRevokeRoleRole() throws Exception { - DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE); - GrantRevokeRoleDDL grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE); + RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertFalse("Did not expect grant ", grantDesc.getGrant()); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.ROLE, principal.getType()); Assert.assertEquals(ROLE, principal.getName()); } @@ -360,17 +339,15 @@ public void testRevokeRoleRole() throws Exception { */ @Test public void testRevokeRoleGroup() throws Exception { - DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP); - GrantRevokeRoleDDL 
grantDesc = work.getGrantRevokeRoleDDL(); + DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP); + RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); - Assert.assertFalse("Did not expect grant ", grantDesc.getGrant()); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); Assert.assertEquals(currentUser, grantDesc.getGrantor()); - Assert.assertEquals(PrincipalType.USER, grantDesc.getGrantorType()); for(String role : ListSizeMatcher.inList(grantDesc.getRoles()).ofSize(1)) { Assert.assertEquals(ROLE, role); } - for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipalDesc()).ofSize(1)) { + for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { Assert.assertEquals(PrincipalType.GROUP, principal.getType()); Assert.assertEquals(GROUP, principal.getName()); } @@ -380,10 +357,9 @@ public void testRevokeRoleGroup() throws Exception { */ @Test public void testShowRoleGrantUser() throws Exception { - DDLWork work = analyze("SHOW ROLE GRANT USER " + USER); - RoleDDLDesc roleDesc = work.getRoleDDLDesc(); + DDLWork2 work = analyze("SHOW ROLE GRANT USER " + USER); + ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); - Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation()); Assert.assertEquals(PrincipalType.USER, roleDesc.getPrincipalType()); Assert.assertEquals(USER, roleDesc.getName()); } @@ -392,10 +368,9 @@ public void testShowRoleGrantUser() throws Exception { */ @Test public void testShowRoleGrantRole() throws Exception { - DDLWork work = analyze("SHOW ROLE GRANT ROLE " + ROLE); - RoleDDLDesc roleDesc = work.getRoleDDLDesc(); + DDLWork2 work = analyze("SHOW ROLE GRANT ROLE " + ROLE); + ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", 
roleDesc); - Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation()); Assert.assertEquals(PrincipalType.ROLE, roleDesc.getPrincipalType()); Assert.assertEquals(ROLE, roleDesc.getName()); } @@ -404,10 +379,9 @@ public void testShowRoleGrantRole() throws Exception { */ @Test public void testShowRoleGrantGroup() throws Exception { - DDLWork work = analyze("SHOW ROLE GRANT GROUP " + GROUP); - RoleDDLDesc roleDesc = work.getRoleDDLDesc(); + DDLWork2 work = analyze("SHOW ROLE GRANT GROUP " + GROUP); + ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); - Assert.assertEquals(RoleOperation.SHOW_ROLE_GRANT, roleDesc.getOperation()); Assert.assertEquals(PrincipalType.GROUP, roleDesc.getPrincipalType()); Assert.assertEquals(GROUP, roleDesc.getName()); } @@ -416,8 +390,8 @@ public void testShowRoleGrantGroup() throws Exception { */ @Test public void testShowGrantUserOnTable() throws Exception { - DDLWork work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE); - ShowGrantDesc grantDesc = work.getShowGrantDesc(); + DDLWork2 work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE); + ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.USER, grantDesc.getPrincipalDesc().getType()); Assert.assertEquals(USER, grantDesc.getPrincipalDesc().getName()); @@ -430,8 +404,8 @@ public void testShowGrantUserOnTable() throws Exception { */ @Test public void testShowGrantRoleOnTable() throws Exception { - DDLWork work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE); - ShowGrantDesc grantDesc = work.getShowGrantDesc(); + DDLWork2 work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE); + ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.ROLE, 
grantDesc.getPrincipalDesc().getType()); Assert.assertEquals(ROLE, grantDesc.getPrincipalDesc().getName()); @@ -444,8 +418,8 @@ public void testShowGrantRoleOnTable() throws Exception { */ @Test public void testShowGrantGroupOnTable() throws Exception { - DDLWork work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE); - ShowGrantDesc grantDesc = work.getShowGrantDesc(); + DDLWork2 work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE); + ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.GROUP, grantDesc.getPrincipalDesc().getType()); Assert.assertEquals(GROUP, grantDesc.getPrincipalDesc().getName()); @@ -482,7 +456,7 @@ public void testGrantServer() throws Exception { } } - private DDLWork analyze(String command) throws Exception { + private DDLWork2 analyze(String command) throws Exception { return AuthorizationTestUtil.analyze(command, queryState, db); } diff --git ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out index f328beb5af..d72cb25724 100644 --- ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out +++ ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: set role ADMIN POSTHOOK: type: SHOW_ROLES PREHOOK: query: create role default PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE] diff --git ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out index 8bc747eac0..f1b469f7fb 100644 --- ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out +++ ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out @@ -55,4 +55,4 @@ public testrole PREHOOK: query: create role TESTRoLE PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists. diff --git ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out index 981c8cd691..9faf5bc7b1 100644 --- ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out +++ ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out @@ -1,3 +1,3 @@ PREHOOK: query: create role r1 PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action. 
diff --git ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out index 8383f52312..e5474ac312 100644 --- ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out +++ ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: set role admin POSTHOOK: type: SHOW_ROLES PREHOOK: query: drop role admin PREHOOK: type: DROPROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error dropping role: public,admin roles can't be dropped. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error dropping role: public,admin roles can't be dropped. diff --git ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out index 637167b9ba..e03796d492 100644 --- ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out +++ ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out @@ -22,4 +22,4 @@ POSTHOOK: type: SHOW_ROLES public PREHOOK: query: drop role r1 PREHOOK: type: DROPROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action. 
diff --git ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out index 330a06c2e3..ebfa03cd5d 100644 --- ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out +++ ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out @@ -61,4 +61,4 @@ POSTHOOK: type: SHOW_ROLES PREHOOK: query: grant all on table tpriv_current_role to user user5 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@tpriv_current_role -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]] diff --git ql/src/test/results/clientnegative/authorization_public_create.q.out ql/src/test/results/clientnegative/authorization_public_create.q.out index 7defa82871..5aaf75d89d 100644 --- ql/src/test/results/clientnegative/authorization_public_create.q.out +++ ql/src/test/results/clientnegative/authorization_public_create.q.out @@ -1,3 +1,3 @@ PREHOOK: query: create role public PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.) diff --git ql/src/test/results/clientnegative/authorization_public_drop.q.out ql/src/test/results/clientnegative/authorization_public_drop.q.out index 6aaa1ffd15..003a5f1497 100644 --- ql/src/test/results/clientnegative/authorization_public_drop.q.out +++ ql/src/test/results/clientnegative/authorization_public_drop.q.out @@ -1,3 +1,3 @@ PREHOOK: query: drop role public PREHOOK: type: DROPROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.) diff --git ql/src/test/results/clientnegative/authorization_role_case.q.out ql/src/test/results/clientnegative/authorization_role_case.q.out index 4908e5dc26..7b07d1be40 100644 --- ql/src/test/results/clientnegative/authorization_role_case.q.out +++ ql/src/test/results/clientnegative/authorization_role_case.q.out @@ -31,4 +31,4 @@ POSTHOOK: Output: default@t1 PREHOOK: query: grant UPDATE on table t1 to role mixcaserole2 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@t1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist) diff --git ql/src/test/results/clientnegative/authorization_role_cycles1.q.out ql/src/test/results/clientnegative/authorization_role_cycles1.q.out index 9303c7e69e..2085067420 100644 --- ql/src/test/results/clientnegative/authorization_role_cycles1.q.out +++ ql/src/test/results/clientnegative/authorization_role_cycles1.q.out @@ -16,4 +16,4 @@ POSTHOOK: query: grant role role1 to role role2 POSTHOOK: type: GRANT_ROLE PREHOOK: query: grant role role2 to role role1 PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed) diff --git ql/src/test/results/clientnegative/authorization_role_cycles2.q.out ql/src/test/results/clientnegative/authorization_role_cycles2.q.out index df27beeafd..cf6b3913ca 100644 --- ql/src/test/results/clientnegative/authorization_role_cycles2.q.out +++ ql/src/test/results/clientnegative/authorization_role_cycles2.q.out @@ -40,4 +40,4 @@ POSTHOOK: query: grant role role5 to role role4 POSTHOOK: type: GRANT_ROLE PREHOOK: query: grant role role2 to role role4 PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. (no cycles allowed) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. 
(no cycles allowed) diff --git ql/src/test/results/clientnegative/authorization_role_grant.q.out ql/src/test/results/clientnegative/authorization_role_grant.q.out index cb79651f82..951b050dfc 100644 --- ql/src/test/results/clientnegative/authorization_role_grant.q.out +++ ql/src/test/results/clientnegative/authorization_role_grant.q.out @@ -31,4 +31,4 @@ POSTHOOK: query: set role role_noadmin POSTHOOK: type: SHOW_ROLES PREHOOK: query: grant src_role_wadmin to user user3 PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action. diff --git ql/src/test/results/clientnegative/authorization_role_grant2.q.out ql/src/test/results/clientnegative/authorization_role_grant2.q.out index ade675252f..7beef281f6 100644 --- ql/src/test/results/clientnegative/authorization_role_grant2.q.out +++ ql/src/test/results/clientnegative/authorization_role_grant2.q.out @@ -48,4 +48,4 @@ POSTHOOK: query: set role src_role_wadmin POSTHOOK: type: SHOW_ROLES PREHOOK: query: grant src_role_wadmin to user user3 PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action. 
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action. diff --git ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out index 481842cdd5..34675bfe58 100644 --- ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out +++ ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out @@ -8,4 +8,4 @@ POSTHOOK: query: create role role1 POSTHOOK: type: CREATEROLE PREHOOK: query: grant role1 to role nosuchrole PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist diff --git ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out index 144b78701b..3a0760d41b 100644 --- ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out +++ ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out @@ -8,4 +8,4 @@ POSTHOOK: query: create role accounting POSTHOOK: type: CREATEROLE PREHOOK: query: show role grant role accounting PREHOOK: type: SHOW_ROLE_GRANT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action. diff --git ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out index a0c6845468..0da86c9e88 100644 --- ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out +++ ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out @@ -19,4 +19,4 @@ POSTHOOK: type: SHOW_ROLE_GRANT public false -1 PREHOOK: query: show role grant user ruser2 PREHOOK: type: SHOW_ROLE_GRANT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action. 
diff --git ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out index ee056be75d..56d6b7e314 100644 --- ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out +++ ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out @@ -1,3 +1,3 @@ PREHOOK: query: set role nosuchroleexists PREHOOK: type: SHOW_ROLES -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. hive_test_user doesn't belong to role nosuchroleexists +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. hive_test_user doesn't belong to role nosuchroleexists diff --git ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out index 539ce39dec..0396a1f6d7 100644 --- ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out +++ ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out @@ -20,4 +20,4 @@ POSTHOOK: query: set role public POSTHOOK: type: SHOW_ROLES PREHOOK: query: set role nosuchroleexists PREHOOK: type: SHOW_ROLES -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. user2 doesn't belong to role nosuchroleexists +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
user2 doesn't belong to role nosuchroleexists diff --git ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out index 736e69335f..d8dad360b0 100644 --- ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out +++ ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out @@ -8,4 +8,4 @@ POSTHOOK: query: create role role1 POSTHOOK: type: CREATEROLE PREHOOK: query: show grant role role1 PREHOOK: type: SHOW_GRANT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action. diff --git ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out index 6eef774759..506088d3b4 100644 --- ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out +++ ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out @@ -9,4 +9,4 @@ POSTHOOK: Output: default@t1 PREHOOK: query: grant ALL on t1 to role nosuchrole PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@t1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist diff --git ql/src/test/results/clientnegative/authorize_grant_public.q.out ql/src/test/results/clientnegative/authorize_grant_public.q.out index 6872b785d4..a25fa01528 100644 --- ql/src/test/results/clientnegative/authorize_grant_public.q.out +++ ql/src/test/results/clientnegative/authorize_grant_public.q.out @@ -1,3 +1,3 @@ PREHOOK: query: grant role public to user hive_test_user PREHOOK: type: GRANT_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.) diff --git ql/src/test/results/clientnegative/authorize_revoke_public.q.out ql/src/test/results/clientnegative/authorize_revoke_public.q.out index ede74871d2..af3fbcb4da 100644 --- ql/src/test/results/clientnegative/authorize_revoke_public.q.out +++ ql/src/test/results/clientnegative/authorize_revoke_public.q.out @@ -1,3 +1,3 @@ PREHOOK: query: revoke role public from user hive_test_user PREHOOK: type: REVOKE_ROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)