commit ba651e310fcf59e65df1d22e33e2fea12960c1fd Author: Daniel Dai Date: Tue Apr 10 14:37:07 2018 -0700 HIVE-19161: Add authorizations to information schema diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index e533ee6..71edac8 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -2366,7 +2366,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "2. When HiveServer2 supports service discovery via Zookeeper.\n" + "3. For delegation token storage if zookeeper store is used, if\n" + "hive.cluster.delegation.token.store.zookeeper.connectString is not set\n" + - "4. LLAP daemon registry service"), + "4. LLAP daemon registry service\n" + + "5. Leader selection for privilege synchronizer"), HIVE_ZOOKEEPER_CLIENT_PORT("hive.zookeeper.client.port", "2181", "The port of ZooKeeper servers to talk to.\n" + @@ -2919,6 +2920,12 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal HIVE_SSL_PROTOCOL_BLACKLIST("hive.ssl.protocol.blacklist", "SSLv2,SSLv3", "SSL Versions to disable for all Hive Servers"), + HIVE_PRIVILEGE_SYNCHRONIZER("hive.privilege.synchronizer", false, + "Synchronize privileges from external authorizer such as ranger to Hive periodically in HS2"), + HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL("hive.privilege.synchronizer.interval", + "1800s", new TimeValidator(TimeUnit.SECONDS), + "Interval to synchronize privileges from external authorizer periodically in HS2"), + // HiveServer2 specific configs HIVE_SERVER2_CLEAR_DANGLING_SCRATCH_DIR("hive.server2.clear.dangling.scratchdir", false, "Clear dangling scratch dir periodically in HS2"), @@ -4210,6 +4217,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "hive.server2.authentication.ldap.userMembershipKey," + "hive.server2.authentication.ldap.groupClassKey," + "hive.server2.authentication.ldap.customLDAPQuery," + + "hive.privilege.synchronizer," + + "hive.privilege.synchronizer.interval," + "hive.spark.client.connect.timeout," + "hive.spark.client.server.connect.timeout," + "hive.spark.client.channel.log.level," + diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java index 801de7a..d684a8d 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -541,6 +542,12 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } @Override + public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + return objectStore.refreshPrivileges(objToRefresh, grantPrivileges); + } + + @Override public Role getRole(String roleName) throws 
NoSuchObjectException { return objectStore.getRole(roleName); } diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml index 05c362e..b92f8f1 100644 --- a/itests/hive-unit/pom.xml +++ b/itests/hive-unit/pom.xml @@ -46,6 +46,11 @@ org.apache.hive + hive-jdbc-handler + ${project.version} + + + org.apache.hive hive-service ${project.version} diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java new file mode 100644 index 0000000..245f0a5 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java @@ -0,0 +1,593 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.service.server; + +import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyChangeListener; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLsImpl; +import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessControllerWrapper; +import 
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizationValidator; +import org.apache.hive.beeline.BeeLine; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.apache.hive.service.cli.CLIServiceClient; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestInformationSchemaWithPrivilege { + + // Group mapping: + // group_a: user1, user2 + // group_b: user2 + static class FakeGroupAuthenticator extends HadoopDefaultAuthenticator { + @Override + public List getGroupNames() { + List groups = new ArrayList(); + if (getUserName().equals("user1")) { + groups.add("group_a"); + } else if (getUserName().equals("user2")) { + groups.add("group_a"); + groups.add("group_b"); + } + return groups; + } + } + + // Privilege matrix: + // user1 user2 group_a group_b public + // testdb1: S S + // testtable1.*: SU S + // testtable2.*: S + // testtable3.*: S + // testtable4.*: S + // testdb2: S + // testtable1.key S + static class TestHivePolicyProvider implements HivePolicyProvider { + @Override + public HiveResourceACLs getResourceACLs(HivePrivilegeObject hiveObject) { + HiveResourceACLsImpl acls = new HiveResourceACLsImpl(); + if (hiveObject.getType() == HivePrivilegeObjectType.DATABASE) { + if (hiveObject.getDbname().equals("testdb1")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + acls.addGroupEntry("group_a", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb2")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } + } else if (hiveObject.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW) { + if (hiveObject.getDbname().equals("testdb1") &&hiveObject.getObjectName().equals("testtable1")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + acls.addUserEntry("user1", HiveResourceACLs.Privilege.UPDATE, HiveResourceACLs.AccessResult.ALLOWED); + acls.addUserEntry("user2", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable2")) { + acls.addGroupEntry("group_a", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable3")) { + acls.addGroupEntry("public", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable4")) { + acls.addGroupEntry("group_b", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb2") && hiveObject.getObjectName().equals("testtable1")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } + } else if (hiveObject.getType() == HivePrivilegeObjectType.COLUMN) { + if (hiveObject.getDbname().equals("testdb1") &&hiveObject.getObjectName().equals("testtable1")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + acls.addUserEntry("user2", HiveResourceACLs.Privilege.SELECT, 
HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable2")) { + acls.addGroupEntry("group_a", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable3")) { + acls.addGroupEntry("public", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb1") && hiveObject.getObjectName().equals("testtable4")) { + acls.addGroupEntry("group_b", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } else if (hiveObject.getDbname().equals("testdb2") && hiveObject.getObjectName().equals("testtable1") + && hiveObject.getColumns().get(0).equals("key")) { + acls.addUserEntry("user1", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED); + } + } + return acls; + } + + @Override + public void registerHivePolicyChangeListener(HivePolicyChangeListener listener) { + // PolicyChangeListener will be implemented later + } + } + + static class HiveAuthorizerImplWithPolicyProvider extends HiveAuthorizerImpl { + + public HiveAuthorizerImplWithPolicyProvider(HiveAccessController accessController, HiveAuthorizationValidator authValidator) { + super(accessController, authValidator); + } + + @Override + public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException { + return new TestHivePolicyProvider(); + } + } + + static class HiveAuthorizerImplWithNullPolicyProvider extends HiveAuthorizerImpl { + + public HiveAuthorizerImplWithNullPolicyProvider(HiveAccessController accessController, HiveAuthorizationValidator authValidator) { + super(accessController, authValidator); + } + + @Override + public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException { + return null; + } + } + + static class TestHiveAuthorizerFactory implements HiveAuthorizerFactory { + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException { + SQLStdHiveAccessControllerWrapper privilegeManager = + new SQLStdHiveAccessControllerWrapper(metastoreClientFactory, conf, authenticator, ctx); + return new HiveAuthorizerImplWithPolicyProvider( + privilegeManager, + new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator, + privilegeManager, ctx) + ); + } + } + + static class TestHiveAuthorizerNullPolicyProviderFactory implements HiveAuthorizerFactory { + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException { + SQLStdHiveAccessControllerWrapper privilegeManager = + new SQLStdHiveAccessControllerWrapper(metastoreClientFactory, conf, authenticator, ctx); + return new HiveAuthorizerImplWithNullPolicyProvider( + privilegeManager, + new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator, + privilegeManager, ctx) + ); + } + } + + private static MiniHS2 miniHS2 = null; + private static MiniZooKeeperCluster zkCluster = null; + private static Map confOverlay; + + @BeforeClass + public static void beforeTest() throws Exception { + File zkDataDir = new File(System.getProperty("test.tmp.dir")); + zkCluster = new MiniZooKeeperCluster(); 
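+    // The privilege synchronizer does leader election through ZooKeeper (item "5. Leader selection
+    // for privilege synchronizer" added to hive.zookeeper.quorum above), so the test brings up a
+    // MiniZooKeeperCluster and points HS2 at it; the 1-second hive.privilege.synchronizer.interval
+    // set below keeps the ACLs from TestHivePolicyProvider flowing into the metastore quickly
+    // enough for the assertions in test().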
+ int zkPort = zkCluster.startup(zkDataDir); + + miniHS2 = new MiniHS2(new HiveConf()); + Map confOverlay = new HashMap(); + confOverlay.put(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER.varname, "true"); + confOverlay.put(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL.varname, "1"); + confOverlay.put(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY.varname, "true"); + confOverlay.put(ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, TestHiveAuthorizerFactory.class.getName()); + confOverlay.put(ConfVars.HIVE_ZOOKEEPER_QUORUM.varname, "localhost"); + confOverlay.put(ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.varname, Integer.toString(zkPort)); + confOverlay.put(MetastoreConf.ConfVars.AUTO_CREATE_ALL.getVarname(), "true"); + confOverlay.put(ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname, FakeGroupAuthenticator.class.getName()); + miniHS2.start(confOverlay); + } + + @Test + public void test() throws Exception { + + String db1Name = "testdb1"; + String db2Name = "testdb2"; + String table1Name = "testtable1"; + String table2Name = "testtable2"; + String table3Name = "testtable3"; + String table4Name = "testtable4"; + CLIServiceClient serviceClient = miniHS2.getServiceClient(); + SessionHandle sessHandle = serviceClient.openSession("hive_test_user", ""); + serviceClient.executeStatement(sessHandle, "DROP DATABASE IF EXISTS " + db1Name + " CASCADE", confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE DATABASE " + db1Name, confOverlay); + serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + db1Name + "." + table1Name, confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE TABLE " + db1Name + "." + table1Name + "(key string, value double)", confOverlay); + serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + db1Name + "." + table2Name, confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE TABLE " + db1Name + "." + table2Name + "(key string, value double)", confOverlay); + serviceClient.executeStatement(sessHandle, "DROP VIEW IF EXISTS " + db1Name + "." + table3Name, confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE VIEW " + db1Name + "." + table3Name + " AS SELECT * FROM " + db1Name + "." + table1Name, confOverlay); + serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + db1Name + "." + table4Name, confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE TABLE " + db1Name + "." + table4Name + "(key string, value double) PARTITIONED BY (p string)", confOverlay); + + serviceClient.executeStatement(sessHandle, "DROP DATABASE IF EXISTS " + db2Name + " CASCADE", confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE DATABASE " + db2Name, confOverlay); + serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + db2Name + "." + table1Name, confOverlay); + serviceClient.executeStatement(sessHandle, "CREATE TABLE " + db2Name + "." 
+ table1Name + "(key string, value double)", confOverlay); + + // Just to trigger auto creation of needed metastore tables + serviceClient.executeStatement(sessHandle, "SHOW GRANT USER hive_test_user ON ALL", confOverlay); + serviceClient.closeSession(sessHandle); + + List baseArgs = new ArrayList(); + baseArgs.add("-d"); + baseArgs.add(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER); + baseArgs.add("-u"); + baseArgs.add(miniHS2.getBaseJdbcURL()); + baseArgs.add("-n"); + baseArgs.add("hive_test_user"); + + List args = new ArrayList(baseArgs); + args.add("-f"); + args.add("../../metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql"); + BeeLine beeLine = new BeeLine(); + int result = beeLine.begin(args.toArray(new String[]{}), null); + beeLine.close(); + Assert.assertEquals(result, 0); + + boolean containsDb1 = false; + boolean containsDb2 = false; + boolean containsDb1Table1 = false; + boolean containsDb1Table2 = false; + boolean containsDb1Table3 = false; + boolean containsDb1Table4 = false; + boolean containsDb2Table1 = false; + boolean containsDb1Table1SelectPriv = false; + boolean containsDb1Table1UpdatePriv = false; + boolean containsDb1Table2SelectPriv = false; + boolean containsDb1Table3SelectPriv = false; + boolean containsDb1Table4SelectPriv = false; + boolean containsDb2Table1SelectPriv = false; + boolean containsDb1Table1Key = false; + boolean containsDb1Table1Value = false; + boolean containsDb1Table2Key = false; + boolean containsDb1Table2Value = false; + boolean containsDb1Table3Key = false; + boolean containsDb1Table3Value = false; + boolean containsDb1Table4Key = false; + boolean containsDb1Table4Value = false; + boolean containsDb1Table4P = false; + boolean containsDb2Table1Key = false; + + // We shall have enough time to synchronize privileges during loading information schema + + // User1 privileges: + // testdb1: S + // testtable1.*: SU + // testtable2.*: S + // testtable3.*: S + // testtable4.*: + // testdb2: S + // testtable1.*: S + sessHandle = serviceClient.openSession("user1", ""); + OperationHandle opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.SCHEMATA", confOverlay); + RowSet rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 2); + Iterator iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name)) { + containsDb1 = true; + } else if (cols[1].equals(db2Name)) { + containsDb2 = true; + } + } + Assert.assertTrue(containsDb1 && containsDb2); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 4); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table1Name)) { + containsDb1Table1 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name)) { + containsDb1Table2 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name)) { + containsDb1Table3 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table4Name)) { + containsDb1Table4 = true; + } else if (cols[1].equals(db2Name) && cols[2].equals(table1Name)) { + containsDb2Table1 = true; + } + } + Assert.assertTrue(containsDb1Table1 && containsDb1Table2 && containsDb1Table3 && !containsDb1Table4 + && containsDb2Table1); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.VIEWS", 
confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 1); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table3Name)) { + containsDb1Table3 = true; + } else { + containsDb1Table3 = false; + } + } + Assert.assertTrue(containsDb1Table3); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLE_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 5); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("SELECT")) { + containsDb1Table1SelectPriv = true; + } if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("UPDATE")) { + containsDb1Table1UpdatePriv = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table2Name) && cols[5].equals("SELECT")) { + containsDb1Table2SelectPriv = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("SELECT")) { + containsDb1Table3SelectPriv = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table4Name) && cols[5].equals("SELECT")) { + containsDb1Table4SelectPriv = true; + } else if (cols[3].equals(db2Name) && cols[4].equals(table1Name) && cols[5].equals("SELECT")) { + containsDb2Table1SelectPriv = true; + } + } + Assert.assertTrue(containsDb1Table1SelectPriv && containsDb1Table1UpdatePriv + && containsDb1Table2SelectPriv && containsDb1Table3SelectPriv + && !containsDb1Table4SelectPriv && containsDb2Table1SelectPriv); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMNS", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 7); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table1Name) && cols[3].equals("key")) { + containsDb1Table1Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table1Name) && cols[3].equals("value")) { + containsDb1Table1Value = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name) && cols[3].equals("key")) { + containsDb1Table2Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name) && cols[3].equals("value")) { + containsDb1Table2Value = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name) && cols[3].equals("key")) { + containsDb1Table3Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name) && cols[3].equals("value")) { + containsDb1Table3Value = true; + } else if (cols[1].equals(db2Name) && cols[2].equals(table1Name) && cols[3].equals("key")) { + containsDb2Table1Key = true; + } + } + Assert.assertTrue(containsDb1Table1Key && containsDb1Table1Value && containsDb1Table2Key + && containsDb1Table2Value && containsDb1Table3Key && containsDb1Table3Value && containsDb2Table1Key); + + containsDb1Table1Key = false; + containsDb1Table1Value = false; + containsDb1Table2Key = false; + containsDb1Table2Value = false; + containsDb1Table3Key = false; + containsDb1Table3Value = false; + containsDb2Table1Key = false; + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMN_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 7); + iter = 
rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("key")) { + containsDb1Table1Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("value")) { + containsDb1Table1Value = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table2Name) && cols[5].equals("key")) { + containsDb1Table2Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table2Name) && cols[5].equals("value")) { + containsDb1Table2Value = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("key")) { + containsDb1Table3Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("value")) { + containsDb1Table3Value = true; + } else if (cols[3].equals(db2Name) && cols[4].equals(table1Name) && cols[5].equals("key")) { + containsDb2Table1Key = true; + } + } + Assert.assertTrue(containsDb1Table1Key && containsDb1Table1Value && containsDb1Table2Key + && containsDb1Table2Value && containsDb1Table3Key && containsDb1Table3Value && containsDb2Table1Key); + serviceClient.closeSession(sessHandle); + + // User2 privileges: + // testdb1: S + // testtable1.*: S + // testtable2.*: S + // testtable3.*: S + // testtable4.*: S + // testdb2: + // testtable1.*: + sessHandle = serviceClient.openSession("user2", ""); + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.SCHEMATA", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 1); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name)) { + containsDb1 = true; + } + } + Assert.assertTrue(containsDb1); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 4); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table1Name)) { + containsDb1Table1 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name)) { + containsDb1Table2 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name)) { + containsDb1Table3 = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table4Name)) { + containsDb1Table4 = true; + } + } + Assert.assertTrue(containsDb1Table1 && containsDb1Table2 && containsDb1Table3 && containsDb1Table4); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.VIEWS", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 1); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table3Name)) { + containsDb1Table3 = true; + } else { + containsDb1Table3 = false; + } + } + Assert.assertTrue(containsDb1Table3); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLE_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 4); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("SELECT")) { + containsDb1Table1SelectPriv = true; + } else if (cols[3].equals(db1Name) && 
cols[4].equals(table2Name) && cols[5].equals("SELECT")) { + containsDb1Table2SelectPriv = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("SELECT")) { + containsDb1Table3SelectPriv = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table4Name) && cols[5].equals("SELECT")) { + containsDb1Table4SelectPriv = true; + } + } + Assert.assertTrue(containsDb1Table1SelectPriv && containsDb1Table2SelectPriv + && containsDb1Table3SelectPriv && containsDb1Table4SelectPriv); + + // db1.testtable3.p should also be in COLUMNS, will fix in separate ticket + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMNS", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 8); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[1].equals(db1Name) && cols[2].equals(table1Name) && cols[3].equals("key")) { + containsDb1Table1Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table1Name) && cols[3].equals("value")) { + containsDb1Table1Value = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name) && cols[3].equals("key")) { + containsDb1Table2Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table2Name) && cols[3].equals("value")) { + containsDb1Table2Value = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name) && cols[3].equals("key")) { + containsDb1Table3Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table3Name) && cols[3].equals("value")) { + containsDb1Table3Value = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table4Name) && cols[3].equals("key")) { + containsDb1Table4Key = true; + } else if (cols[1].equals(db1Name) && cols[2].equals(table4Name) && cols[3].equals("value")) { + containsDb1Table4Value = true; + } + } + Assert.assertTrue(containsDb1Table1Key && containsDb1Table1Value && containsDb1Table2Key + && containsDb1Table2Value && containsDb1Table3Key && containsDb1Table3Value + && containsDb1Table4Key && containsDb1Table4Value); + + containsDb1Table1Key = false; + containsDb1Table1Value = false; + containsDb1Table2Key = false; + containsDb1Table2Value = false; + containsDb1Table3Key = false; + containsDb1Table3Value = false; + containsDb1Table4Key = false; + containsDb1Table4Value = false; + containsDb1Table4P = false; + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMN_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 9); + iter = rowSet.iterator(); + while (iter.hasNext()) { + Object[] cols = iter.next(); + if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("key")) { + containsDb1Table1Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table1Name) && cols[5].equals("value")) { + containsDb1Table1Value = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table2Name) && cols[5].equals("key")) { + containsDb1Table2Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table2Name) && cols[5].equals("value")) { + containsDb1Table2Value = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("key")) { + containsDb1Table3Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table3Name) && cols[5].equals("value")) { + containsDb1Table3Value = true; + } else if (cols[3].equals(db1Name) && 
cols[4].equals(table4Name) && cols[5].equals("key")) { + containsDb1Table4Key = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table4Name) && cols[5].equals("value")) { + containsDb1Table4Value = true; + } else if (cols[3].equals(db1Name) && cols[4].equals(table4Name) && cols[5].equals("p")) { + containsDb1Table4P = true; + } + } + Assert.assertTrue(containsDb1Table1Key && containsDb1Table1Value && containsDb1Table2Key + && containsDb1Table2Value && containsDb1Table3Key && containsDb1Table3Value + && containsDb1Table4Key && containsDb1Table4Value && containsDb1Table4P); + serviceClient.closeSession(sessHandle); + + // Revert hive.server2.restrict_information_schema to false + miniHS2.getHiveConf().set(ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, + TestHiveAuthorizerNullPolicyProviderFactory.class.getName()); + miniHS2.getHiveConf().unset(MetastoreConf.ConfVars.PRE_EVENT_LISTENERS.getVarname()); + + sessHandle = serviceClient.openSession("user1", ""); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.SCHEMATA", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertTrue(rowSet.numRows() > 2); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertTrue(rowSet.numRows() > 10); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.TABLE_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 7); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMNS", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertTrue(rowSet.numRows() > 10); + + opHandle = serviceClient.executeStatement(sessHandle, "select * from INFORMATION_SCHEMA.COLUMN_PRIVILEGES", confOverlay); + rowSet = serviceClient.fetchResults(opHandle); + Assert.assertEquals(rowSet.numRows(), 12); + } +} diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java index 7dc690f..82bf921 100644 --- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java @@ -36,6 +36,18 @@ public static DatabaseAccessor getAccessor(DatabaseType dbType) { accessor = new MySqlDatabaseAccessor(); break; + case POSTGRES: + accessor = new PostgresDatabaseAccessor(); + break; + + case ORACLE: + accessor = new OracleDatabaseAccessor(); + break; + + case MSSQL: + accessor = new MsSqlDatabaseAccessor(); + break; + default: accessor = new GenericJdbcDatabaseAccessor(); break; diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java index 178c97d..20fb35d 100644 --- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java @@ -16,6 +16,7 @@ import org.apache.commons.dbcp.BasicDataSourceFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.Credentials; import 
org.apache.hadoop.security.UserGroupInformation; @@ -146,8 +147,7 @@ public int getTotalNumberOfRecords(Configuration conf) throws HiveJdbcDatabaseAc ps = conn.prepareStatement(limitQuery, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); ps.setFetchSize(getFetchSize(conf)); rs = ps.executeQuery(); - - return new JdbcRecordIterator(conn, ps, rs); + return new JdbcRecordIterator(conn, ps, rs, conf.get(serdeConstants.LIST_COLUMN_TYPES)); } catch (Exception e) { LOGGER.error("Caught exception while trying to execute query", e); diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java index 638e2b0..0c0df33 100644 --- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/JdbcRecordIterator.java @@ -14,6 +14,9 @@ */ package org.apache.hive.storage.jdbc.dao; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.NullWritable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -22,6 +25,7 @@ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -36,12 +40,15 @@ private Connection conn; private PreparedStatement ps; private ResultSet rs; + private ArrayList columnTypes = null; - - public JdbcRecordIterator(Connection conn, PreparedStatement ps, ResultSet rs) { + public JdbcRecordIterator(Connection conn, PreparedStatement ps, ResultSet rs, String typeString) { this.conn = conn; this.ps = ps; this.rs = rs; + if (typeString != null) { + this.columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(typeString); + } } @@ -65,7 +72,29 @@ public boolean hasNext() { Map record = new HashMap(numColumns); for (int i = 0; i < numColumns; i++) { String key = metadata.getColumnName(i + 1); - Object value = rs.getObject(i + 1); + Object value; + if (columnTypes!=null && columnTypes.get(i) instanceof PrimitiveTypeInfo) { + // This is not a complete list, barely make information schema work + switch (((PrimitiveTypeInfo)columnTypes.get(i)).getTypeName()) { + case "bigint": + value = rs.getLong(i + 1); + break; + case "int": + value = rs.getInt(i + 1); + break; + case "double": + value = rs.getDouble(i + 1); + break; + case "boolean": + value = rs.getBoolean(i + 1); + break; + default: + value = rs.getObject(i + 1); + break; + } + } else { + value = rs.getObject(i + 1); + } record.put(key, value); } diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MsSqlDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MsSqlDatabaseAccessor.java new file mode 100644 index 0000000..15390da --- /dev/null +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/MsSqlDatabaseAccessor.java @@ -0,0 +1,39 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.storage.jdbc.dao; + +/** + * MSSQL specific data accessor. This is needed because MSSQL JDBC drivers do not support generic LIMIT and OFFSET + * escape functions + */ +public class MsSqlDatabaseAccessor extends GenericJdbcDatabaseAccessor { + + @Override + protected String addLimitAndOffsetToQuery(String sql, int limit, int offset) { + if (offset == 0) { + return addLimitToQuery(sql, limit); + } + else { + // Order by is not necessary, but MS SQL require it to use FETCH + return sql + " ORDER BY 1 OFFSET " + offset + " ROWS FETCH NEXT " + limit + " ROWS ONLY"; + } + } + + @Override + protected String addLimitToQuery(String sql, int limit) { + return sql + " {LIMIT " + limit + "}"; + } + +} diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java new file mode 100644 index 0000000..7297f79 --- /dev/null +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/OracleDatabaseAccessor.java @@ -0,0 +1,44 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.storage.jdbc.dao; + +/** + * Oracle specific data accessor. 
This is needed because Oracle JDBC drivers do not support generic LIMIT and OFFSET + * escape functions + */ +public class OracleDatabaseAccessor extends GenericJdbcDatabaseAccessor { + + // Random column name to reduce the chance of conflict + static final String RowNumColumnName = "dummy_rownum_col_rn1938392"; + + @Override + protected String addLimitAndOffsetToQuery(String sql, int limit, int offset) { + if (offset == 0) { + return addLimitToQuery(sql, limit); + } + else { + // A simple ROWNUM > offset and ROWNUM <= (offset + limit) won't work, it will return nothing + return "SELECT * FROM (SELECT t.*, ROWNUM AS " + RowNumColumnName + " FROM (" + sql + ") t) WHERE " + + RowNumColumnName + " >" + offset + " AND " + RowNumColumnName + " <=" + (offset + limit); + } + } + + + @Override + protected String addLimitToQuery(String sql, int limit) { + return "SELECT * FROM (" + sql + ") WHERE ROWNUM <= " + limit; + } + +} diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/PostgresDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/PostgresDatabaseAccessor.java new file mode 100644 index 0000000..64bbc65 --- /dev/null +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/PostgresDatabaseAccessor.java @@ -0,0 +1,37 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.storage.jdbc.dao; + +/** + * Postgres specific data accessor. 
Postgres JDBC drivers do not support generic LIMIT and OFFSET + * escape functions + */ +public class PostgresDatabaseAccessor extends GenericJdbcDatabaseAccessor { + + @Override + protected String addLimitAndOffsetToQuery(String sql, int limit, int offset) { + if (offset == 0) { + return addLimitToQuery(sql, limit); + } + else { + return sql + " LIMIT " + limit + " OFFSET " + offset; + } + } + + @Override + protected String addLimitToQuery(String sql, int limit) { + return sql + " LIMIT " + limit; + } +} diff --git a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql index 3444439..985f3c8 100644 --- a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql +++ b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql @@ -96,7 +96,7 @@ TBLPROPERTIES ( \"OWNER_NAME\", \"OWNER_TYPE\" FROM - DBS" + \"DBS\"" ); CREATE TABLE IF NOT EXISTS `DB_PRIVS` ( @@ -605,7 +605,7 @@ TBLPROPERTIES ( \"VIEW_EXPANDED_TEXT\", \"VIEW_ORIGINAL_TEXT\", \"IS_REWRITE_ENABLED\" -FROM TBLS" +FROM \"TBLS\"" ); CREATE TABLE IF NOT EXISTS `MV_CREATION_METADATA` ( @@ -624,7 +624,7 @@ TBLPROPERTIES ( \"DB_NAME\", \"TBL_NAME\", \"TXN_LIST\" -FROM MV_CREATION_METADATA" +FROM \"MV_CREATION_METADATA\"" ); CREATE TABLE IF NOT EXISTS `MV_TABLES_USED` ( @@ -638,7 +638,7 @@ TBLPROPERTIES ( "SELECT \"MV_CREATION_METADATA_ID\", \"TBL_ID\" -FROM MV_TABLES_USED" +FROM \"MV_TABLES_USED\"" ); CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` ( @@ -964,16 +964,16 @@ TBLPROPERTIES ( "hive.sql.database.type" = "METASTORE", "hive.sql.query" = "SELECT - r.NAME RP_NAME, - t.NAME NAME, - TRIGGER_EXPRESSION, - ACTION_EXPRESSION + r.\"NAME\" AS RP_NAME, + t.\"NAME\" AS NAME, + \"TRIGGER_EXPRESSION\", + \"ACTION_EXPRESSION\" FROM - WM_TRIGGER t + \"WM_TRIGGER\" t JOIN - WM_RESOURCEPLAN r + \"WM_RESOURCEPLAN\" r ON - t.RP_ID = r.RP_ID" + t.\"RP_ID\" = r.\"RP_ID\"" ); CREATE TABLE IF NOT EXISTS `WM_POOLS` ( @@ -988,17 +988,17 @@ TBLPROPERTIES ( "hive.sql.database.type" = "METASTORE", "hive.sql.query" = "SELECT - WM_RESOURCEPLAN.NAME, - WM_POOL.PATH, - WM_POOL.ALLOC_FRACTION, - WM_POOL.QUERY_PARALLELISM, - WM_POOL.SCHEDULING_POLICY + \"WM_RESOURCEPLAN\".\"NAME\", + \"WM_POOL\".\"PATH\", + \"WM_POOL\".\"ALLOC_FRACTION\", + \"WM_POOL\".\"QUERY_PARALLELISM\", + \"WM_POOL\".\"SCHEDULING_POLICY\" FROM - WM_POOL + \"WM_POOL\" JOIN - WM_RESOURCEPLAN + \"WM_RESOURCEPLAN\" ON - WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID" + \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"" ); CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` ( @@ -1011,21 +1011,21 @@ TBLPROPERTIES ( "hive.sql.database.type" = "METASTORE", "hive.sql.query" = "SELECT - WM_RESOURCEPLAN.NAME RP_NAME, - WM_POOL.PATH POOL_PATH, - WM_TRIGGER.NAME TRIGGER_NAME -FROM WM_POOL_TO_TRIGGER - JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID - JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID - JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID + \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME, + \"WM_POOL\".\"PATH\" AS POOL_PATH, + \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME +FROM \"WM_POOL_TO_TRIGGER\" + JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = \"WM_POOL\".\"POOL_ID\" + JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = \"WM_TRIGGER\".\"TRIGGER_ID\" + JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\" UNION SELECT - WM_RESOURCEPLAN.NAME RP_NAME, - '' POOL_PATH, - WM_TRIGGER.NAME TRIGGER_NAME -FROM WM_TRIGGER - JOIN WM_RESOURCEPLAN ON 
WM_TRIGGER.RP_ID = WM_RESOURCEPLAN.RP_ID -WHERE WM_TRIGGER.IS_IN_UNMANAGED = 1 + \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME, + '' AS POOL_PATH, + \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME +FROM \"WM_TRIGGER\" + JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\" +WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS INT) = 1 " ); @@ -1041,14 +1041,14 @@ TBLPROPERTIES ( "hive.sql.database.type" = "METASTORE", "hive.sql.query" = "SELECT - WM_RESOURCEPLAN.NAME, - ENTITY_TYPE, - ENTITY_NAME, - case when WM_POOL.PATH is null then '' else WM_POOL.PATH end, - ORDERING -FROM WM_MAPPING -JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID -LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID + \"WM_RESOURCEPLAN\".\"NAME\", + \"ENTITY_TYPE\", + \"ENTITY_NAME\", + case when \"WM_POOL\".\"PATH\" is null then '' else \"WM_POOL\".\"PATH\" end, + \"ORDERING\" +FROM \"WM_MAPPING\" +JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\" +LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = \"WM_MAPPING\".\"POOL_ID\" " ); @@ -1067,16 +1067,22 @@ CREATE VIEW IF NOT EXISTS `SCHEMATA` `DEFAULT_CHARACTER_SET_NAME`, `SQL_PATH` ) AS -SELECT +SELECT DISTINCT 'default', - `NAME`, - `OWNER_NAME`, + D.`NAME`, + D.`OWNER_NAME`, cast(null as string), cast(null as string), cast(null as string), `DB_LOCATION_URI` FROM - sys.DBS; + `sys`.`DBS` D, `sys`.`TBLS` T, `sys`.`TBL_PRIVS` P +WHERE + NOT restrict_information_schema() OR + D.`DB_ID` = T.`DB_ID` + AND T.`TBL_ID` = P.`TBL_ID` + AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP')); CREATE VIEW IF NOT EXISTS `TABLES` ( @@ -1093,7 +1099,7 @@ CREATE VIEW IF NOT EXISTS `TABLES` `IS_TYPED`, `COMMIT_ACTION` ) AS -SELECT +SELECT DISTINCT 'default', D.NAME, T.TBL_NAME, @@ -1107,9 +1113,13 @@ SELECT 'NO', cast(null as string) FROM - `sys`.`TBLS` T, `sys`.`DBS` D + `sys`.`TBLS` T, `sys`.`DBS` D, `sys`.`TBL_PRIVS` P WHERE - D.`DB_ID` = T.`DB_ID`; + D.`DB_ID` = T.`DB_ID` + AND (NOT restrict_information_schema() OR T.`TBL_ID` = P.`TBL_ID` + AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP')) + AND P.`TBL_PRIV`='SELECT'); CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES` ( @@ -1122,9 +1132,9 @@ CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES` `IS_GRANTABLE`, `WITH_HIERARCHY` ) AS -SELECT - `GRANTOR`, - `PRINCIPAL_NAME`, +SELECT DISTINCT + P.`GRANTOR`, + P.`PRINCIPAL_NAME`, 'default', D.`NAME`, T.`TBL_NAME`, @@ -1132,12 +1142,18 @@ SELECT IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'), 'NO' FROM - sys.`TBL_PRIVS` P, - sys.`TBLS` T, - sys.`DBS` D + `sys`.`TBL_PRIVS` P, + `sys`.`TBLS` T, + `sys`.`DBS` D, + `sys`.`TBL_PRIVS` P2 WHERE - P.TBL_ID = T.TBL_ID - AND T.DB_ID = D.DB_ID; + P.`TBL_ID` = T.`TBL_ID` + AND T.`DB_ID` = D.`DB_ID` + AND (NOT restrict_information_schema() OR + P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE` + AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP')) + AND P2.`TBL_PRIV`='SELECT'); CREATE VIEW IF NOT EXISTS `COLUMNS` ( @@ -1189,7 +1205,7 @@ CREATE VIEW IF NOT EXISTS `COLUMNS` 
`DECLARED_NUMERIC_PRECISION`, `DECLARED_NUMERIC_SCALE` ) AS -SELECT +SELECT DISTINCT 'default', D.NAME, T.TBL_NAME, @@ -1278,14 +1294,21 @@ SELECT WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10 ELSE null END FROM - sys.`COLUMNS_V2` C, - sys.`SDS` S, - sys.`TBLS` T, - sys.`DBS` D + `sys`.`COLUMNS_V2` C, + `sys`.`SDS` S, + `sys`.`TBLS` T, + `sys`.`DBS` D, + `sys`.`TBL_COL_PRIVS` P WHERE S.`SD_ID` = T.`SD_ID` AND T.`DB_ID` = D.`DB_ID` - AND C.`CD_ID` = S.`CD_ID`; + AND C.`CD_ID` = S.`CD_ID` + AND (NOT restrict_information_schema() OR + T.`TBL_ID` = P.`TBL_ID` + AND C.`COLUMN_NAME` = P.`COLUMN_NAME` + AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP')) + AND P.`TBL_COL_PRIV`='SELECT'); CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES` ( @@ -1298,27 +1321,30 @@ CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES` `PRIVILEGE_TYPE`, `IS_GRANTABLE` ) AS -SELECT - `GRANTOR`, - `PRINCIPAL_NAME`, +SELECT DISTINCT + P.`GRANTOR`, + P.`PRINCIPAL_NAME`, 'default', D.`NAME`, T.`TBL_NAME`, - C.`COLUMN_NAME`, + P.`COLUMN_NAME`, P.`TBL_COL_PRIV`, IF (P.`GRANT_OPTION` == 0, 'NO', 'YES') FROM - sys.`TBL_COL_PRIVS` P, - sys.`TBLS` T, - sys.`DBS` D, - sys.`COLUMNS_V2` C, - sys.`SDS` S + `sys`.`TBL_COL_PRIVS` P, + `sys`.`TBLS` T, + `sys`.`DBS` D, + `sys`.`SDS` S, + `sys`.`TBL_PRIVS` P2 WHERE S.`SD_ID` = T.`SD_ID` AND T.`DB_ID` = D.`DB_ID` AND P.`TBL_ID` = T.`TBL_ID` - AND P.`COLUMN_NAME` = C.`COLUMN_NAME` - AND C.`CD_ID` = S.`CD_ID`; + AND (NOT restrict_information_schema() OR + P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE` + AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP')) + AND P2.`TBL_PRIV`='SELECT'); CREATE VIEW IF NOT EXISTS `VIEWS` ( @@ -1333,7 +1359,7 @@ CREATE VIEW IF NOT EXISTS `VIEWS` `IS_TRIGGER_DELETABLE`, `IS_TRIGGER_INSERTABLE_INTO` ) AS -SELECT +SELECT DISTINCT 'default', D.NAME, T.TBL_NAME, @@ -1346,7 +1372,13 @@ SELECT false FROM `sys`.`DBS` D, - `sys`.`TBLS` T + `sys`.`TBLS` T, + `sys`.`TBL_PRIVS` P WHERE - D.`DB_ID` = T.`DB_ID` AND - length(T.VIEW_ORIGINAL_TEXT) > 0; + D.`DB_ID` = T.`DB_ID` + AND length(T.VIEW_ORIGINAL_TEXT) > 0 + AND (NOT restrict_information_schema() OR + T.`TBL_ID` = P.`TBL_ID` + AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER' + OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP')) + AND P.`TBL_PRIV`='SELECT'); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index d59bf1f..66ed6c1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -356,7 +356,9 @@ system.registerGenericUDF("current_date", GenericUDFCurrentDate.class); system.registerGenericUDF("current_timestamp", GenericUDFCurrentTimestamp.class); system.registerGenericUDF("current_user", GenericUDFCurrentUser.class); + system.registerGenericUDF("current_groups", GenericUDFCurrentGroups.class); system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class); + system.registerGenericUDF("restrict_information_schema", GenericUDFRestrictInformationSchema.class); 
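+    // The two UDFs registered above back the rewritten information schema views:
+    // restrict_information_schema() reports whether privilege-based filtering of the views is in
+    // effect, and current_groups() returns the session user's group names (cf. the new
+    // SessionState.getGroupsFromAuthenticator()) so the views can match GROUP-type privileges via
+    // array_contains(current_groups(), PRINCIPAL_NAME).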
system.registerGenericUDF("isnull", GenericUDFOPNull.class); system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java index 60d9dc1..4edbc53 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider; /** * Hive's pluggable authorization provider interface @@ -124,4 +126,11 @@ public void authorize(Table table, Partition part, List columns, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException; + /** + * @return HivePolicyProvider instance (expected to be a singleton) + * @throws HiveAuthzPluginException + */ + default public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException { + return null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java new file mode 100644 index 0000000..1de1002 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.apache.curator.framework.recipes.leader.LeaderLatch; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; +import org.apache.hadoop.hive.metastore.api.HiveObjectType; +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class PrivilegeSynchonizer implements Runnable { + + private static final Logger LOG = LoggerFactory.getLogger(PrivilegeSynchonizer.class); + public static final String GRANTOR = "ranger"; + private IMetaStoreClient hiveClient; + private HivePolicyProvider policyProvider; + private LeaderLatch privilegeSynchonizerLatch; + private HiveConf hiveConf; + + public PrivilegeSynchonizer(LeaderLatch privilegeSynchonizerLatch, HivePolicyProvider policyProvider, + HiveConf hiveConf) { + try { + hiveClient = new HiveMetastoreClientFactoryImpl().getHiveMetastoreClient(); + } catch (HiveAuthzPluginException e) { + throw new RuntimeException("Error creating getHiveMetastoreClient", e); + } + this.privilegeSynchonizerLatch = privilegeSynchonizerLatch; + this.policyProvider = policyProvider; + this.hiveConf = hiveConf; + } + + private void addACLsToBag(Map> principalAclsMap, PrivilegeBag privBag, + HiveObjectType objectType, String dbName, String tblName, String columnName, PrincipalType principalType) { + + for (Map.Entry> principalAcls : principalAclsMap.entrySet()) { + String principal = principalAcls.getKey(); + for (Map.Entry acl : principalAcls.getValue().entrySet()) { + if (acl.getValue() == HiveResourceACLs.AccessResult.ALLOWED) { + switch (objectType) { + case DATABASE: + privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef( + HiveObjectType.DATABASE, dbName, null, null, null), principal, principalType, + new PrivilegeGrantInfo(acl.getKey().toString(), (int)(System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false))); + break; + case TABLE: + privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef( + HiveObjectType.TABLE, dbName, tblName, null, null), principal, principalType, + new PrivilegeGrantInfo(acl.getKey().toString(), (int)(System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false))); + break; + case COLUMN: + privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef( + HiveObjectType.COLUMN, dbName, tblName, null, columnName), principal, principalType, + new PrivilegeGrantInfo(acl.getKey().toString(), 
(int)(System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false))); + break; + default: + throw new RuntimeException("Get unknown object type " + objectType); + } + } + } + } + } + + private HiveObjectRef getObjToRefresh(HiveObjectType type, String dbName, String tblName) throws Exception { + HiveObjectRef objToRefresh = null; + switch (type) { + case DATABASE: + objToRefresh = new HiveObjectRef(HiveObjectType.DATABASE, dbName, null, null, null); + break; + case TABLE: + objToRefresh = new HiveObjectRef(HiveObjectType.TABLE, dbName, tblName, null, null); + break; + case COLUMN: + objToRefresh = new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, null); + break; + default: + throw new RuntimeException("Get unknown object type " + type); + } + return objToRefresh; + } + + private void addGrantPrivilegesToBag(PrivilegeBag privBag, HiveObjectType type, String dbName, String tblName, String columnName) throws Exception { + + HiveResourceACLs objectAcls = null; + + switch(type) { + case DATABASE: + objectAcls = policyProvider.getResourceACLs(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null)); + break; + + case TABLE: + objectAcls = policyProvider.getResourceACLs(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tblName)); + break; + + case COLUMN: + objectAcls = policyProvider.getResourceACLs(new HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, dbName, tblName, null, columnName)); + break; + + default: + throw new RuntimeException("Get unknown object type " + type); + } + + if (objectAcls == null) { + return; + } + + addACLsToBag(objectAcls.getUserPermissions(), privBag, type, dbName, tblName, columnName, PrincipalType.USER); + addACLsToBag(objectAcls.getGroupPermissions(), privBag, type, dbName, tblName, columnName, PrincipalType.GROUP); + } + + @Override + public void run() { + while (true) { + try { + long interval = HiveConf.getTimeVar(hiveConf, ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL, + TimeUnit.SECONDS); + if (hiveConf.getBoolVar(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER)) { + if (!privilegeSynchonizerLatch.await(interval, TimeUnit.SECONDS)) { + continue; + } + LOG.debug("Start synchonize privilege"); + for (String dbName : hiveClient.getAllDatabases()) { + HiveObjectRef dbToRefresh = getObjToRefresh(HiveObjectType.DATABASE, dbName, null); + PrivilegeBag grantDatabaseBag = new PrivilegeBag(); + addGrantPrivilegesToBag(grantDatabaseBag, HiveObjectType.DATABASE, dbName, null, null); + hiveClient.refresh_privileges(dbToRefresh, grantDatabaseBag); + + for (String tblName : hiveClient.getAllTables(dbName)) { + HiveObjectRef tableToRefresh = getObjToRefresh(HiveObjectType.TABLE, dbName, tblName); + PrivilegeBag grantTableBag = new PrivilegeBag(); + addGrantPrivilegesToBag(grantTableBag, HiveObjectType.TABLE, dbName, tblName, null); + hiveClient.refresh_privileges(tableToRefresh, grantTableBag); + + HiveObjectRef tableOfColumnsToRefresh = getObjToRefresh(HiveObjectType.COLUMN, dbName, tblName); + PrivilegeBag grantColumnBag = new PrivilegeBag(); + Table tbl = hiveClient.getTable(dbName, tblName); + for (FieldSchema fs : tbl.getPartitionKeys()) { + addGrantPrivilegesToBag(grantColumnBag, HiveObjectType.COLUMN, dbName, tblName, fs.getName()); + } + for (FieldSchema fs : tbl.getSd().getCols()) { + addGrantPrivilegesToBag(grantColumnBag, HiveObjectType.COLUMN, dbName, tblName, fs.getName()); + } + hiveClient.refresh_privileges(tableOfColumnsToRefresh, grantColumnBag); + } + } + } + // Wait if no exception happens, otherwise, 
retry immediately + Thread.sleep(interval * 1000); + LOG.debug("Success synchonize privilege"); + } catch (Exception e) { + LOG.error("Error initializing PrivilegeSynchonizer: " + e.getMessage(), e); + } + } + } +} \ No newline at end of file diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java new file mode 100644 index 0000000..6804d16 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.EnumMap; +import java.util.HashMap; +import java.util.Map; + +/** + * Default implementation of {@link HiveResourceACLs}. + */ +public class HiveResourceACLsImpl implements HiveResourceACLs { + + Map> userPermissions = new HashMap>(); + Map> groupPermissions = new HashMap>(); + + @Override + public Map> getUserPermissions() { + return userPermissions; + } + + @Override + public Map> getGroupPermissions() { + return groupPermissions; + } + + public void addUserEntry(String user, Privilege priv, AccessResult result) { + if (userPermissions.containsKey(user)) { + userPermissions.get(user).put(priv, result); + } else { + Map entry = new EnumMap(Privilege.class); + entry.put(priv, result); + userPermissions.put(user, entry); + } + } + + public void addGroupEntry(String group, Privilege priv, AccessResult result) { + if (groupPermissions.containsKey(group)) { + groupPermissions.get(group).put(priv, result); + } else { + Map entry = new EnumMap(Privilege.class); + entry.put(priv, result); + groupPermissions.put(group, entry); + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 6003ced..cd242f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -1247,6 +1247,13 @@ public static String getUserFromAuthenticator() { return null; } + public static List getGroupsFromAuthenticator() { + if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) { + return SessionState.get().getAuthenticator().getGroupNames(); + } + return null; + } + static void validateFiles(List newFiles) throws IllegalArgumentException { SessionState ss = SessionState.get(); Configuration conf = (ss == null) ? 
new Configuration() : ss.getConf(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java new file mode 100644 index 0000000..791efa6 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.ql.udf.UDFType; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.io.Text; + +// This function is not a deterministic function, but a runtime constant. +// The return value is constant within a query but can be different between queries. 
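// --------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): how a HivePolicyProvider
// implementation could populate the new HiveResourceACLsImpl, which
// PrivilegeSynchonizer then reads back through getUserPermissions() and
// getGroupPermissions(). The principal names are made up, and
// HiveResourceACLs.Privilege.SELECT is assumed to be one of the enum values;
// AccessResult.ALLOWED is the constant the synchronizer checks for.
// --------------------------------------------------------------------------
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLsImpl;

public class AclsSketch {                           // hypothetical helper, not in the patch
  public static HiveResourceACLs exampleAcls() {
    HiveResourceACLsImpl acls = new HiveResourceACLsImpl();
    // User "analyst" and group "etl" are both allowed to SELECT on the resource.
    acls.addUserEntry("analyst", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED);
    acls.addGroupEntry("etl", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED);
    return acls;
  }
}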
+@UDFType(deterministic = false, runtimeConstant = true) +@Description(name = "current_group", value = "_FUNC_() - Returns all groups the current user belongs to", extended = "SessionState GroupsFromAuthenticator") +public class GenericUDFCurrentGroups extends GenericUDF { + protected List currentGroups; + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + if (arguments.length != 0) { + throw new UDFArgumentLengthException( + "The function CURRENT_GROUPS does not take any arguments, but found " + arguments.length); + } + + if (currentGroups == null) { + List sessGroupsFromAuth = SessionState.getGroupsFromAuthenticator(); + if (sessGroupsFromAuth != null) { + currentGroups = new ArrayList(); + for (String group : sessGroupsFromAuth) { + currentGroups.add(new Text(group)); + } + } + } + + return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector); + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + return currentGroups; + } + + public List getCurrentGroups() { + return currentGroups; + } + + public void setCurrentGroups(List currentGroups) { + this.currentGroups = currentGroups; + } + + @Override + public String getDisplayString(String[] children) { + return "CURRENT_GROUPS()"; + } + + @Override + public void copyToNewInstance(Object newInstance) throws UDFArgumentException { + super.copyToNewInstance(newInstance); + // Need to preserve currentGroups + GenericUDFCurrentGroups other = (GenericUDFCurrentGroups) newInstance; + if (this.currentGroups != null) { + if (currentGroups != null) { + other.currentGroups = new ArrayList(); + for (Text group : currentGroups) { + other.currentGroups.add(new Text(group)); + } + } + } + } +} \ No newline at end of file diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java new file mode 100644 index 0000000..60a90f3 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
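// --------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): exercising the new
// current_groups() UDF directly. Inside HiveServer2 the group list comes from
// SessionState.getGroupsFromAuthenticator(); outside an active session the
// UDF simply returns null. Class and variable names are illustrative.
// --------------------------------------------------------------------------
import java.util.List;

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentGroups;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Text;

public class CurrentGroupsSketch {                  // hypothetical, not in the patch
  public static List<Text> currentGroups() throws Exception {
    GenericUDFCurrentGroups udf = new GenericUDFCurrentGroups();
    udf.initialize(new ObjectInspector[0]);         // the UDF takes no arguments
    @SuppressWarnings("unchecked")
    List<Text> groups = (List<Text>) udf.evaluate(new DeferredObject[0]);
    return groups;                                  // null when no authenticator is set
  }
}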
+ */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.ql.exec.Description; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; +import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.ql.udf.UDFType; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.io.BooleanWritable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +// This function is not a deterministic function, but a runtime constant. +// The return value is constant within a query but can be different between queries. +@UDFType(deterministic = false, runtimeConstant = true) +@Description(name = "restrict_information_schema", + value = "_FUNC_() - Returns whether or not to enable information schema restriction. " + + "Currently it is enabled if either HS2 authorizer or metastore authorizer implements policy provider " + + "interface.") +@NDV(maxNdv = 1) +public class GenericUDFRestrictInformationSchema extends GenericUDF { + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFRestrictInformationSchema.class.getName()); + protected BooleanWritable enabled; + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + if (arguments.length != 0) { + throw new UDFArgumentLengthException( + "The function RestrictInformationSchema does not take any arguments, but found " + arguments.length); + } + + if (enabled == null) { + boolean enableHS2PolicyProvider = false; + boolean enableMetastorePolicyProvider = false; + + HiveConf hiveConf = SessionState.getSessionConf(); + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + try { + if (authorizer.getHivePolicyProvider() != null) { + enableHS2PolicyProvider = true; + } + } catch (HiveAuthzPluginException e) { + LOG.warn("Error getting HivePolicyProvider", e); + } + + if (!enableHS2PolicyProvider) { + if (MetastoreConf.getVar(hiveConf, MetastoreConf.ConfVars.PRE_EVENT_LISTENERS) != null && + !MetastoreConf.getVar(hiveConf, MetastoreConf.ConfVars.PRE_EVENT_LISTENERS).isEmpty() && + HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER) != null) { + List authorizerProviders; + try { + authorizerProviders = HiveUtils.getMetaStoreAuthorizeProviderManagers( + hiveConf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, + SessionState.get().getAuthenticator()); + for (HiveMetastoreAuthorizationProvider authProvider : authorizerProviders) { + if (authProvider.getHivePolicyProvider() != null) { + enableMetastorePolicyProvider = true; + break; + } + } + } catch (HiveAuthzPluginException e) { + LOG.warn("Error getting HivePolicyProvider", e); + } catch (HiveException e) { + LOG.warn("Error instantiating hive.security.metastore.authorization.manager", e); + } + } + } + + if (enableHS2PolicyProvider || 
enableMetastorePolicyProvider) { + enabled = new BooleanWritable(true); + } else { + enabled = new BooleanWritable(false); + } + } + + return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + return enabled; + } + + @Override + public String getDisplayString(String[] children) { + return "RESTRICT_INFORMATION_SCHEMA()"; + } + + @Override + public void copyToNewInstance(Object newInstance) throws UDFArgumentException { + super.copyToNewInstance(newInstance); + // Need to preserve enabled flag + GenericUDFRestrictInformationSchema other = (GenericUDFRestrictInformationSchema) newInstance; + if (this.enabled != null) { + other.enabled = new BooleanWritable(this.enabled.get()); + } + } +} diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 47f84b5..ca172bf 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -56,6 +56,7 @@ import org.apache.curator.framework.api.BackgroundCallback; import org.apache.curator.framework.api.CuratorEvent; import org.apache.curator.framework.api.CuratorEventType; +import org.apache.curator.framework.recipes.leader.LeaderLatch; import org.apache.curator.framework.recipes.leader.LeaderLatchListener; import org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode; import org.apache.curator.retry.ExponentialBackoffRetry; @@ -80,6 +81,9 @@ import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry; +import org.apache.hadoop.hive.ql.security.authorization.PrivilegeSynchonizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider; import org.apache.hadoop.hive.ql.session.ClearDanglingScratchDir; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper; @@ -134,6 +138,7 @@ private ThriftCLIService thriftCLIService; private PersistentEphemeralNode znode; private CuratorFramework zooKeeperClient; + private CuratorFramework zKClientForPrivSync = null; private boolean deregisteredWithZooKeeper = false; // Set to true only when deregistration happens private HttpServer webServer; // Web UI private TezSessionPoolManager tezSessionPoolManager; @@ -405,17 +410,9 @@ public static boolean isKerberosAuthMode(Configuration hiveConf) { } }; - /** - * Adds a server instance to ZooKeeper as a znode. 
- * - * @param hiveConf - * @throws Exception - */ - private void addServerInstanceToZooKeeper(HiveConf hiveConf, Map confsToPublish) throws Exception { - String zooKeeperEnsemble = ZooKeeperHiveHelper.getQuorumServers(hiveConf); - String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE); - String instanceURI = getServerInstanceURI(); + private CuratorFramework startZookeeperClient(HiveConf hiveConf) throws Exception { setUpZooKeeperAuth(hiveConf); + String zooKeeperEnsemble = ZooKeeperHiveHelper.getQuorumServers(hiveConf); int sessionTimeout = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT, TimeUnit.MILLISECONDS); @@ -425,14 +422,16 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf, Map int maxRetries = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES); // Create a CuratorFramework instance to be used as the ZooKeeper client // Use the zooKeeperAclProvider to create appropriate ACLs - zooKeeperClient = + CuratorFramework zkClient = CuratorFrameworkFactory.builder().connectString(zooKeeperEnsemble) .sessionTimeoutMs(sessionTimeout).aclProvider(zooKeeperAclProvider) .retryPolicy(new ExponentialBackoffRetry(baseSleepTime, maxRetries)).build(); - zooKeeperClient.start(); + zkClient.start(); + // Create the parent znodes recursively; ignore if the parent already exists. + String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE); try { - zooKeeperClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT) + zkClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT) .forPath(ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace); LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2"); } catch (KeeperException e) { @@ -441,6 +440,20 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf, Map throw e; } } + return zkClient; + } + + /** + * Adds a server instance to ZooKeeper as a znode. 
+ * + * @param hiveConf + * @throws Exception + */ + private void addServerInstanceToZooKeeper(HiveConf hiveConf, Map confsToPublish) throws Exception { + zooKeeperClient = startZookeeperClient(hiveConf); + String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE); + String instanceURI = getServerInstanceURI(); + // Create a znode under the rootNamespace parent for this instance of the server // Znode name: serverUri=host:port;version=versionInfo;sequence=sequenceNumber try { @@ -653,6 +666,14 @@ public synchronized void start() { throw new ServiceException(e); } } + + try { + startPrivilegeSynchonizer(hiveConf); + } catch (Exception e) { + LOG.error("Error starting priviledge synchonizer: ", e); + throw new ServiceException(e); + } + if (webServer != null) { try { webServer.start(); @@ -851,6 +872,10 @@ public synchronized void stop() { LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex); } } + + if (zKClientForPrivSync != null) { + zKClientForPrivSync.close(); + } } private void shutdownExecutor(final ExecutorService leaderActionsExecutorService) { @@ -884,6 +909,28 @@ public static void scheduleClearDanglingScratchDir(HiveConf hiveConf, int initia } } + public void startPrivilegeSynchonizer(HiveConf hiveConf) throws Exception { + if (hiveConf.getBoolVar(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER)) { + zKClientForPrivSync = startZookeeperClient(hiveConf); + String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE); + String path = ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace + + ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + "leader"; + LeaderLatch privilegeSynchonizerLatch = new LeaderLatch(zKClientForPrivSync, path); + privilegeSynchonizerLatch.start(); + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + HivePolicyProvider policyProvider = authorizer.getHivePolicyProvider(); + if (policyProvider == null) { + LOG.warn("Cannot start PrivilegeSynchonizer, policyProvider of " + authorizer.getClass().getName() + + " is null"); + privilegeSynchonizerLatch.close(); + return; + } + Thread privilegeSynchonizerThread = new Thread(new PrivilegeSynchonizer(privilegeSynchonizerLatch, + policyProvider, hiveConf)); + privilegeSynchonizerThread.start(); + } + } + private static void startHiveServer2() throws Throwable { long attempts = 0, maxAttempts = 1; while (true) { diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp index fd52f09..c0771fa 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp +++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp @@ -33225,6 +33225,229 @@ uint32_t ThriftHiveMetastore_grant_revoke_privileges_presult::read(::apache::thr } +ThriftHiveMetastore_refresh_privileges_args::~ThriftHiveMetastore_refresh_privileges_args() throw() { +} + + +uint32_t ThriftHiveMetastore_refresh_privileges_args::read(::apache::thrift::protocol::TProtocol* iprot) { + + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 1: + if (ftype == 
::apache::thrift::protocol::T_STRUCT) { + xfer += this->objToRefresh.read(iprot); + this->__isset.objToRefresh = true; + } else { + xfer += iprot->skip(ftype); + } + break; + case 2: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->grantRequest.read(iprot); + this->__isset.grantRequest = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + +uint32_t ThriftHiveMetastore_refresh_privileges_args::write(::apache::thrift::protocol::TProtocol* oprot) const { + uint32_t xfer = 0; + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); + xfer += oprot->writeStructBegin("ThriftHiveMetastore_refresh_privileges_args"); + + xfer += oprot->writeFieldBegin("objToRefresh", ::apache::thrift::protocol::T_STRUCT, 1); + xfer += this->objToRefresh.write(oprot); + xfer += oprot->writeFieldEnd(); + + xfer += oprot->writeFieldBegin("grantRequest", ::apache::thrift::protocol::T_STRUCT, 2); + xfer += this->grantRequest.write(oprot); + xfer += oprot->writeFieldEnd(); + + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + + +ThriftHiveMetastore_refresh_privileges_pargs::~ThriftHiveMetastore_refresh_privileges_pargs() throw() { +} + + +uint32_t ThriftHiveMetastore_refresh_privileges_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { + uint32_t xfer = 0; + apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); + xfer += oprot->writeStructBegin("ThriftHiveMetastore_refresh_privileges_pargs"); + + xfer += oprot->writeFieldBegin("objToRefresh", ::apache::thrift::protocol::T_STRUCT, 1); + xfer += (*(this->objToRefresh)).write(oprot); + xfer += oprot->writeFieldEnd(); + + xfer += oprot->writeFieldBegin("grantRequest", ::apache::thrift::protocol::T_STRUCT, 2); + xfer += (*(this->grantRequest)).write(oprot); + xfer += oprot->writeFieldEnd(); + + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + + +ThriftHiveMetastore_refresh_privileges_result::~ThriftHiveMetastore_refresh_privileges_result() throw() { +} + + +uint32_t ThriftHiveMetastore_refresh_privileges_result::read(::apache::thrift::protocol::TProtocol* iprot) { + + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 0: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->success.read(iprot); + this->__isset.success = true; + } else { + xfer += iprot->skip(ftype); + } + break; + case 1: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o1.read(iprot); + this->__isset.o1 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + +uint32_t ThriftHiveMetastore_refresh_privileges_result::write(::apache::thrift::protocol::TProtocol* oprot) const { + + uint32_t xfer = 0; + + xfer += oprot->writeStructBegin("ThriftHiveMetastore_refresh_privileges_result"); + + if (this->__isset.success) { + xfer += 
oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0); + xfer += this->success.write(oprot); + xfer += oprot->writeFieldEnd(); + } else if (this->__isset.o1) { + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1); + xfer += this->o1.write(oprot); + xfer += oprot->writeFieldEnd(); + } + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + + +ThriftHiveMetastore_refresh_privileges_presult::~ThriftHiveMetastore_refresh_privileges_presult() throw() { +} + + +uint32_t ThriftHiveMetastore_refresh_privileges_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + + apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 0: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += (*(this->success)).read(iprot); + this->__isset.success = true; + } else { + xfer += iprot->skip(ftype); + } + break; + case 1: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o1.read(iprot); + this->__isset.o1 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + + ThriftHiveMetastore_set_ugi_args::~ThriftHiveMetastore_set_ugi_args() throw() { } @@ -57399,6 +57622,68 @@ void ThriftHiveMetastoreClient::recv_grant_revoke_privileges(GrantRevokePrivileg throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "grant_revoke_privileges failed: unknown result"); } +void ThriftHiveMetastoreClient::refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) +{ + send_refresh_privileges(objToRefresh, grantRequest); + recv_refresh_privileges(_return); +} + +void ThriftHiveMetastoreClient::send_refresh_privileges(const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) +{ + int32_t cseqid = 0; + oprot_->writeMessageBegin("refresh_privileges", ::apache::thrift::protocol::T_CALL, cseqid); + + ThriftHiveMetastore_refresh_privileges_pargs args; + args.objToRefresh = &objToRefresh; + args.grantRequest = &grantRequest; + args.write(oprot_); + + oprot_->writeMessageEnd(); + oprot_->getTransport()->writeEnd(); + oprot_->getTransport()->flush(); +} + +void ThriftHiveMetastoreClient::recv_refresh_privileges(GrantRevokePrivilegeResponse& _return) +{ + + int32_t rseqid = 0; + std::string fname; + ::apache::thrift::protocol::TMessageType mtype; + + iprot_->readMessageBegin(fname, mtype, rseqid); + if (mtype == ::apache::thrift::protocol::T_EXCEPTION) { + ::apache::thrift::TApplicationException x; + x.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + throw x; + } + if (mtype != ::apache::thrift::protocol::T_REPLY) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + } + if (fname.compare("refresh_privileges") != 0) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + } + 
ThriftHiveMetastore_refresh_privileges_presult result; + result.success = &_return; + result.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + + if (result.__isset.success) { + // _return pointer has now been filled + return; + } + if (result.__isset.o1) { + throw result.o1; + } + throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "refresh_privileges failed: unknown result"); +} + void ThriftHiveMetastoreClient::set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names) { send_set_ugi(user_name, group_names); @@ -69654,6 +69939,63 @@ void ThriftHiveMetastoreProcessor::process_grant_revoke_privileges(int32_t seqid } } +void ThriftHiveMetastoreProcessor::process_refresh_privileges(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext) +{ + void* ctx = NULL; + if (this->eventHandler_.get() != NULL) { + ctx = this->eventHandler_->getContext("ThriftHiveMetastore.refresh_privileges", callContext); + } + ::apache::thrift::TProcessorContextFreer freer(this->eventHandler_.get(), ctx, "ThriftHiveMetastore.refresh_privileges"); + + if (this->eventHandler_.get() != NULL) { + this->eventHandler_->preRead(ctx, "ThriftHiveMetastore.refresh_privileges"); + } + + ThriftHiveMetastore_refresh_privileges_args args; + args.read(iprot); + iprot->readMessageEnd(); + uint32_t bytes = iprot->getTransport()->readEnd(); + + if (this->eventHandler_.get() != NULL) { + this->eventHandler_->postRead(ctx, "ThriftHiveMetastore.refresh_privileges", bytes); + } + + ThriftHiveMetastore_refresh_privileges_result result; + try { + iface_->refresh_privileges(result.success, args.objToRefresh, args.grantRequest); + result.__isset.success = true; + } catch (MetaException &o1) { + result.o1 = o1; + result.__isset.o1 = true; + } catch (const std::exception& e) { + if (this->eventHandler_.get() != NULL) { + this->eventHandler_->handlerError(ctx, "ThriftHiveMetastore.refresh_privileges"); + } + + ::apache::thrift::TApplicationException x(e.what()); + oprot->writeMessageBegin("refresh_privileges", ::apache::thrift::protocol::T_EXCEPTION, seqid); + x.write(oprot); + oprot->writeMessageEnd(); + oprot->getTransport()->writeEnd(); + oprot->getTransport()->flush(); + return; + } + + if (this->eventHandler_.get() != NULL) { + this->eventHandler_->preWrite(ctx, "ThriftHiveMetastore.refresh_privileges"); + } + + oprot->writeMessageBegin("refresh_privileges", ::apache::thrift::protocol::T_REPLY, seqid); + result.write(oprot); + oprot->writeMessageEnd(); + bytes = oprot->getTransport()->writeEnd(); + oprot->getTransport()->flush(); + + if (this->eventHandler_.get() != NULL) { + this->eventHandler_->postWrite(ctx, "ThriftHiveMetastore.refresh_privileges", bytes); + } +} + void ThriftHiveMetastoreProcessor::process_set_ugi(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext) { void* ctx = NULL; @@ -85764,6 +86106,95 @@ void ThriftHiveMetastoreConcurrentClient::recv_grant_revoke_privileges(GrantRevo } // end while(true) } +void ThriftHiveMetastoreConcurrentClient::refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) +{ + int32_t seqid = send_refresh_privileges(objToRefresh, grantRequest); + recv_refresh_privileges(_return, seqid); +} + +int32_t 
ThriftHiveMetastoreConcurrentClient::send_refresh_privileges(const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) +{ + int32_t cseqid = this->sync_.generateSeqId(); + ::apache::thrift::async::TConcurrentSendSentry sentry(&this->sync_); + oprot_->writeMessageBegin("refresh_privileges", ::apache::thrift::protocol::T_CALL, cseqid); + + ThriftHiveMetastore_refresh_privileges_pargs args; + args.objToRefresh = &objToRefresh; + args.grantRequest = &grantRequest; + args.write(oprot_); + + oprot_->writeMessageEnd(); + oprot_->getTransport()->writeEnd(); + oprot_->getTransport()->flush(); + + sentry.commit(); + return cseqid; +} + +void ThriftHiveMetastoreConcurrentClient::recv_refresh_privileges(GrantRevokePrivilegeResponse& _return, const int32_t seqid) +{ + + int32_t rseqid = 0; + std::string fname; + ::apache::thrift::protocol::TMessageType mtype; + + // the read mutex gets dropped and reacquired as part of waitForWork() + // The destructor of this sentry wakes up other clients + ::apache::thrift::async::TConcurrentRecvSentry sentry(&this->sync_, seqid); + + while(true) { + if(!this->sync_.getPending(fname, mtype, rseqid)) { + iprot_->readMessageBegin(fname, mtype, rseqid); + } + if(seqid == rseqid) { + if (mtype == ::apache::thrift::protocol::T_EXCEPTION) { + ::apache::thrift::TApplicationException x; + x.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + sentry.commit(); + throw x; + } + if (mtype != ::apache::thrift::protocol::T_REPLY) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + } + if (fname.compare("refresh_privileges") != 0) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + + // in a bad state, don't commit + using ::apache::thrift::protocol::TProtocolException; + throw TProtocolException(TProtocolException::INVALID_DATA); + } + ThriftHiveMetastore_refresh_privileges_presult result; + result.success = &_return; + result.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + + if (result.__isset.success) { + // _return pointer has now been filled + sentry.commit(); + return; + } + if (result.__isset.o1) { + sentry.commit(); + throw result.o1; + } + // in a bad state, don't commit + throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "refresh_privileges failed: unknown result"); + } + // seqid != rseqid + this->sync_.updatePending(fname, mtype, rseqid); + + // this will temporarily unlock the readMutex, and let other clients get work done + this->sync_.waitForWork(seqid); + } // end while(true) +} + void ThriftHiveMetastoreConcurrentClient::set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names) { int32_t seqid = send_set_ugi(user_name, group_names); diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h index 802d8e3..a6322ff 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h +++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h @@ -149,6 +149,7 @@ class ThriftHiveMetastoreIf : virtual public ::facebook::fb303::FacebookService virtual bool grant_privileges(const PrivilegeBag& privileges) = 0; virtual bool revoke_privileges(const PrivilegeBag& privileges) = 0; virtual void grant_revoke_privileges(GrantRevokePrivilegeResponse& _return, 
const GrantRevokePrivilegeRequest& request) = 0; + virtual void refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) = 0; virtual void set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names) = 0; virtual void get_delegation_token(std::string& _return, const std::string& token_owner, const std::string& renewer_kerberos_principal_name) = 0; virtual int64_t renew_delegation_token(const std::string& token_str_form) = 0; @@ -656,6 +657,9 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p void grant_revoke_privileges(GrantRevokePrivilegeResponse& /* _return */, const GrantRevokePrivilegeRequest& /* request */) { return; } + void refresh_privileges(GrantRevokePrivilegeResponse& /* _return */, const HiveObjectRef& /* objToRefresh */, const GrantRevokePrivilegeRequest& /* grantRequest */) { + return; + } void set_ugi(std::vector & /* _return */, const std::string& /* user_name */, const std::vector & /* group_names */) { return; } @@ -17156,6 +17160,125 @@ class ThriftHiveMetastore_grant_revoke_privileges_presult { }; +typedef struct _ThriftHiveMetastore_refresh_privileges_args__isset { + _ThriftHiveMetastore_refresh_privileges_args__isset() : objToRefresh(false), grantRequest(false) {} + bool objToRefresh :1; + bool grantRequest :1; +} _ThriftHiveMetastore_refresh_privileges_args__isset; + +class ThriftHiveMetastore_refresh_privileges_args { + public: + + ThriftHiveMetastore_refresh_privileges_args(const ThriftHiveMetastore_refresh_privileges_args&); + ThriftHiveMetastore_refresh_privileges_args& operator=(const ThriftHiveMetastore_refresh_privileges_args&); + ThriftHiveMetastore_refresh_privileges_args() { + } + + virtual ~ThriftHiveMetastore_refresh_privileges_args() throw(); + HiveObjectRef objToRefresh; + GrantRevokePrivilegeRequest grantRequest; + + _ThriftHiveMetastore_refresh_privileges_args__isset __isset; + + void __set_objToRefresh(const HiveObjectRef& val); + + void __set_grantRequest(const GrantRevokePrivilegeRequest& val); + + bool operator == (const ThriftHiveMetastore_refresh_privileges_args & rhs) const + { + if (!(objToRefresh == rhs.objToRefresh)) + return false; + if (!(grantRequest == rhs.grantRequest)) + return false; + return true; + } + bool operator != (const ThriftHiveMetastore_refresh_privileges_args &rhs) const { + return !(*this == rhs); + } + + bool operator < (const ThriftHiveMetastore_refresh_privileges_args & ) const; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + + +class ThriftHiveMetastore_refresh_privileges_pargs { + public: + + + virtual ~ThriftHiveMetastore_refresh_privileges_pargs() throw(); + const HiveObjectRef* objToRefresh; + const GrantRevokePrivilegeRequest* grantRequest; + + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + +typedef struct _ThriftHiveMetastore_refresh_privileges_result__isset { + _ThriftHiveMetastore_refresh_privileges_result__isset() : success(false), o1(false) {} + bool success :1; + bool o1 :1; +} _ThriftHiveMetastore_refresh_privileges_result__isset; + +class ThriftHiveMetastore_refresh_privileges_result { + public: + + ThriftHiveMetastore_refresh_privileges_result(const ThriftHiveMetastore_refresh_privileges_result&); + ThriftHiveMetastore_refresh_privileges_result& operator=(const ThriftHiveMetastore_refresh_privileges_result&); + 
ThriftHiveMetastore_refresh_privileges_result() { + } + + virtual ~ThriftHiveMetastore_refresh_privileges_result() throw(); + GrantRevokePrivilegeResponse success; + MetaException o1; + + _ThriftHiveMetastore_refresh_privileges_result__isset __isset; + + void __set_success(const GrantRevokePrivilegeResponse& val); + + void __set_o1(const MetaException& val); + + bool operator == (const ThriftHiveMetastore_refresh_privileges_result & rhs) const + { + if (!(success == rhs.success)) + return false; + if (!(o1 == rhs.o1)) + return false; + return true; + } + bool operator != (const ThriftHiveMetastore_refresh_privileges_result &rhs) const { + return !(*this == rhs); + } + + bool operator < (const ThriftHiveMetastore_refresh_privileges_result & ) const; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + +typedef struct _ThriftHiveMetastore_refresh_privileges_presult__isset { + _ThriftHiveMetastore_refresh_privileges_presult__isset() : success(false), o1(false) {} + bool success :1; + bool o1 :1; +} _ThriftHiveMetastore_refresh_privileges_presult__isset; + +class ThriftHiveMetastore_refresh_privileges_presult { + public: + + + virtual ~ThriftHiveMetastore_refresh_privileges_presult() throw(); + GrantRevokePrivilegeResponse* success; + MetaException o1; + + _ThriftHiveMetastore_refresh_privileges_presult__isset __isset; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + +}; + typedef struct _ThriftHiveMetastore_set_ugi_args__isset { _ThriftHiveMetastore_set_ugi_args__isset() : user_name(false), group_names(false) {} bool user_name :1; @@ -26036,6 +26159,9 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public void grant_revoke_privileges(GrantRevokePrivilegeResponse& _return, const GrantRevokePrivilegeRequest& request); void send_grant_revoke_privileges(const GrantRevokePrivilegeRequest& request); void recv_grant_revoke_privileges(GrantRevokePrivilegeResponse& _return); + void refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest); + void send_refresh_privileges(const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest); + void recv_refresh_privileges(GrantRevokePrivilegeResponse& _return); void set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names); void send_set_ugi(const std::string& user_name, const std::vector & group_names); void recv_set_ugi(std::vector & _return); @@ -26398,6 +26524,7 @@ class ThriftHiveMetastoreProcessor : public ::facebook::fb303::FacebookServiceP void process_grant_privileges(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); void process_revoke_privileges(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); void process_grant_revoke_privileges(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); + void process_refresh_privileges(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); void process_set_ugi(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); void process_get_delegation_token(int32_t seqid, 
::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); void process_renew_delegation_token(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext); @@ -26604,6 +26731,7 @@ class ThriftHiveMetastoreProcessor : public ::facebook::fb303::FacebookServiceP processMap_["grant_privileges"] = &ThriftHiveMetastoreProcessor::process_grant_privileges; processMap_["revoke_privileges"] = &ThriftHiveMetastoreProcessor::process_revoke_privileges; processMap_["grant_revoke_privileges"] = &ThriftHiveMetastoreProcessor::process_grant_revoke_privileges; + processMap_["refresh_privileges"] = &ThriftHiveMetastoreProcessor::process_refresh_privileges; processMap_["set_ugi"] = &ThriftHiveMetastoreProcessor::process_set_ugi; processMap_["get_delegation_token"] = &ThriftHiveMetastoreProcessor::process_get_delegation_token; processMap_["renew_delegation_token"] = &ThriftHiveMetastoreProcessor::process_renew_delegation_token; @@ -27928,6 +28056,16 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi return; } + void refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) { + size_t sz = ifaces_.size(); + size_t i = 0; + for (; i < (sz - 1); ++i) { + ifaces_[i]->refresh_privileges(_return, objToRefresh, grantRequest); + } + ifaces_[i]->refresh_privileges(_return, objToRefresh, grantRequest); + return; + } + void set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names) { size_t sz = ifaces_.size(); size_t i = 0; @@ -29051,6 +29189,9 @@ class ThriftHiveMetastoreConcurrentClient : virtual public ThriftHiveMetastoreIf void grant_revoke_privileges(GrantRevokePrivilegeResponse& _return, const GrantRevokePrivilegeRequest& request); int32_t send_grant_revoke_privileges(const GrantRevokePrivilegeRequest& request); void recv_grant_revoke_privileges(GrantRevokePrivilegeResponse& _return, const int32_t seqid); + void refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest); + int32_t send_refresh_privileges(const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest); + void recv_refresh_privileges(GrantRevokePrivilegeResponse& _return, const int32_t seqid); void set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names); int32_t send_set_ugi(const std::string& user_name, const std::vector & group_names); void recv_set_ugi(std::vector & _return, const int32_t seqid); diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp index c0a39f8..c63f540 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp +++ b/standalone-metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp @@ -657,6 +657,11 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf { printf("grant_revoke_privileges\n"); } + void refresh_privileges(GrantRevokePrivilegeResponse& _return, const HiveObjectRef& objToRefresh, const GrantRevokePrivilegeRequest& grantRequest) { + // Your implementation goes here + printf("refresh_privileges\n"); + } + void set_ugi(std::vector & _return, const std::string& user_name, const std::vector & group_names) { // Your 
implementation goes here printf("set_ugi\n"); diff --git a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java index afe82e3..4a642cf 100644 --- a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java +++ b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java @@ -296,6 +296,8 @@ public GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request) throws MetaException, org.apache.thrift.TException; + public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest) throws MetaException, org.apache.thrift.TException; + public List set_ugi(String user_name, List group_names) throws MetaException, org.apache.thrift.TException; public String get_delegation_token(String token_owner, String renewer_kerberos_principal_name) throws MetaException, org.apache.thrift.TException; @@ -704,6 +706,8 @@ public void grant_revoke_privileges(GrantRevokePrivilegeRequest request, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; + public void refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; + public void set_ugi(String user_name, List group_names, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; public void get_delegation_token(String token_owner, String renewer_kerberos_principal_name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException; @@ -4690,6 +4694,33 @@ public GrantRevokePrivilegeResponse recv_grant_revoke_privileges() throws MetaEx throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "grant_revoke_privileges failed: unknown result"); } + public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest) throws MetaException, org.apache.thrift.TException + { + send_refresh_privileges(objToRefresh, grantRequest); + return recv_refresh_privileges(); + } + + public void send_refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest) throws org.apache.thrift.TException + { + refresh_privileges_args args = new refresh_privileges_args(); + args.setObjToRefresh(objToRefresh); + args.setGrantRequest(grantRequest); + sendBase("refresh_privileges", args); + } + + public GrantRevokePrivilegeResponse recv_refresh_privileges() throws MetaException, org.apache.thrift.TException + { + refresh_privileges_result result = new refresh_privileges_result(); + receiveBase(result, "refresh_privileges"); + if (result.isSetSuccess()) { + return result.success; + } + if (result.o1 != null) { + throw result.o1; + } + throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "refresh_privileges failed: unknown result"); + } + public List set_ugi(String user_name, List group_names) throws MetaException, org.apache.thrift.TException { send_set_ugi(user_name, group_names); @@ -11257,6 +11288,41 @@ public GrantRevokePrivilegeResponse getResult() throws MetaException, org.apache } } + public void 
refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { + checkReady(); + refresh_privileges_call method_call = new refresh_privileges_call(objToRefresh, grantRequest, resultHandler, this, ___protocolFactory, ___transport); + this.___currentMethod = method_call; + ___manager.call(method_call); + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class refresh_privileges_call extends org.apache.thrift.async.TAsyncMethodCall { + private HiveObjectRef objToRefresh; + private GrantRevokePrivilegeRequest grantRequest; + public refresh_privileges_call(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException { + super(client, protocolFactory, transport, resultHandler, false); + this.objToRefresh = objToRefresh; + this.grantRequest = grantRequest; + } + + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException { + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("refresh_privileges", org.apache.thrift.protocol.TMessageType.CALL, 0)); + refresh_privileges_args args = new refresh_privileges_args(); + args.setObjToRefresh(objToRefresh); + args.setGrantRequest(grantRequest); + args.write(prot); + prot.writeMessageEnd(); + } + + public GrantRevokePrivilegeResponse getResult() throws MetaException, org.apache.thrift.TException { + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) { + throw new IllegalStateException("Method call not finished!"); + } + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array()); + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); + return (new Client(prot)).recv_refresh_privileges(); + } + } + public void set_ugi(String user_name, List group_names, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException { checkReady(); set_ugi_call method_call = new set_ugi_call(user_name, group_names, resultHandler, this, ___protocolFactory, ___transport); @@ -13800,6 +13866,7 @@ protected Processor(I iface, Map extends org.apache.thrift.ProcessFunction { + public refresh_privileges() { + super("refresh_privileges"); + } + + public refresh_privileges_args getEmptyArgsInstance() { + return new refresh_privileges_args(); + } + + protected boolean isOneway() { + return false; + } + + public refresh_privileges_result getResult(I iface, refresh_privileges_args args) throws org.apache.thrift.TException { + refresh_privileges_result result = new refresh_privileges_result(); + try { + result.success = iface.refresh_privileges(args.objToRefresh, args.grantRequest); + } catch (MetaException o1) { + result.o1 = o1; + } + return result; + } + } + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_ugi extends org.apache.thrift.ProcessFunction { public set_ugi() { super("set_ugi"); @@ -19208,6 +19299,7 @@ protected AsyncProcessor(I iface, Map extends 
org.apache.thrift.AsyncProcessFunction> { - public set_ugi() { - super("set_ugi"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class refresh_privileges extends org.apache.thrift.AsyncProcessFunction { + public refresh_privileges() { + super("refresh_privileges"); } - public set_ugi_args getEmptyArgsInstance() { - return new set_ugi_args(); + public refresh_privileges_args getEmptyArgsInstance() { + return new refresh_privileges_args(); } - public AsyncMethodCallback> getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback>() { - public void onComplete(List o) { - set_ugi_result result = new set_ugi_result(); + return new AsyncMethodCallback() { + public void onComplete(GrantRevokePrivilegeResponse o) { + refresh_privileges_result result = new refresh_privileges_result(); result.success = o; try { fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); @@ -27285,7 +27377,7 @@ public void onComplete(List o) { public void onError(Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TBase msg; - set_ugi_result result = new set_ugi_result(); + refresh_privileges_result result = new refresh_privileges_result(); if (e instanceof MetaException) { result.o1 = (MetaException) e; result.setO1IsSet(true); @@ -27311,25 +27403,25 @@ protected boolean isOneway() { return false; } - public void start(I iface, set_ugi_args args, org.apache.thrift.async.AsyncMethodCallback> resultHandler) throws TException { - iface.set_ugi(args.user_name, args.group_names,resultHandler); + public void start(I iface, refresh_privileges_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.refresh_privileges(args.objToRefresh, args.grantRequest,resultHandler); } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_delegation_token extends org.apache.thrift.AsyncProcessFunction { - public get_delegation_token() { - super("get_delegation_token"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_ugi extends org.apache.thrift.AsyncProcessFunction> { + public set_ugi() { + super("set_ugi"); } - public get_delegation_token_args getEmptyArgsInstance() { - return new get_delegation_token_args(); + public set_ugi_args getEmptyArgsInstance() { + return new set_ugi_args(); } - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + public AsyncMethodCallback> getResultHandler(final AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(String o) { - get_delegation_token_result result = new get_delegation_token_result(); + return new AsyncMethodCallback>() { + public void onComplete(List o) { + set_ugi_result result = new set_ugi_result(); result.success = o; try { fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); @@ -27342,7 +27434,7 @@ public void onComplete(String o) { public void onError(Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; 
org.apache.thrift.TBase msg; - get_delegation_token_result result = new get_delegation_token_result(); + set_ugi_result result = new set_ugi_result(); if (e instanceof MetaException) { result.o1 = (MetaException) e; result.setO1IsSet(true); @@ -27368,83 +27460,26 @@ protected boolean isOneway() { return false; } - public void start(I iface, get_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.get_delegation_token(args.token_owner, args.renewer_kerberos_principal_name,resultHandler); + public void start(I iface, set_ugi_args args, org.apache.thrift.async.AsyncMethodCallback> resultHandler) throws TException { + iface.set_ugi(args.user_name, args.group_names,resultHandler); } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class renew_delegation_token extends org.apache.thrift.AsyncProcessFunction { - public renew_delegation_token() { - super("renew_delegation_token"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_delegation_token extends org.apache.thrift.AsyncProcessFunction { + public get_delegation_token() { + super("get_delegation_token"); } - public renew_delegation_token_args getEmptyArgsInstance() { - return new renew_delegation_token_args(); + public get_delegation_token_args getEmptyArgsInstance() { + return new get_delegation_token_args(); } - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(Long o) { - renew_delegation_token_result result = new renew_delegation_token_result(); + return new AsyncMethodCallback() { + public void onComplete(String o) { + get_delegation_token_result result = new get_delegation_token_result(); result.success = o; - result.setSuccessIsSet(true); - try { - fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); - return; - } catch (Exception e) { - LOGGER.error("Exception writing to internal frame buffer", e); - } - fb.close(); - } - public void onError(Exception e) { - byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; - org.apache.thrift.TBase msg; - renew_delegation_token_result result = new renew_delegation_token_result(); - if (e instanceof MetaException) { - result.o1 = (MetaException) e; - result.setO1IsSet(true); - msg = result; - } - else - { - msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; - msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); - } - try { - fcall.sendResponse(fb,msg,msgType,seqid); - return; - } catch (Exception ex) { - LOGGER.error("Exception writing to internal frame buffer", ex); - } - fb.close(); - } - }; - } - - protected boolean isOneway() { - return false; - } - - public void start(I iface, renew_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.renew_delegation_token(args.token_str_form,resultHandler); - } - } - - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class cancel_delegation_token extends 
org.apache.thrift.AsyncProcessFunction { - public cancel_delegation_token() { - super("cancel_delegation_token"); - } - - public cancel_delegation_token_args getEmptyArgsInstance() { - return new cancel_delegation_token_args(); - } - - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { - final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(Void o) { - cancel_delegation_token_result result = new cancel_delegation_token_result(); try { fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); return; @@ -27456,7 +27491,7 @@ public void onComplete(Void o) { public void onError(Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TBase msg; - cancel_delegation_token_result result = new cancel_delegation_token_result(); + get_delegation_token_result result = new get_delegation_token_result(); if (e instanceof MetaException) { result.o1 = (MetaException) e; result.setO1IsSet(true); @@ -27482,231 +27517,25 @@ protected boolean isOneway() { return false; } - public void start(I iface, cancel_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.cancel_delegation_token(args.token_str_form,resultHandler); - } - } - - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_token extends org.apache.thrift.AsyncProcessFunction { - public add_token() { - super("add_token"); - } - - public add_token_args getEmptyArgsInstance() { - return new add_token_args(); - } - - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { - final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(Boolean o) { - add_token_result result = new add_token_result(); - result.success = o; - result.setSuccessIsSet(true); - try { - fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); - return; - } catch (Exception e) { - LOGGER.error("Exception writing to internal frame buffer", e); - } - fb.close(); - } - public void onError(Exception e) { - byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; - org.apache.thrift.TBase msg; - add_token_result result = new add_token_result(); - { - msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; - msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); - } - try { - fcall.sendResponse(fb,msg,msgType,seqid); - return; - } catch (Exception ex) { - LOGGER.error("Exception writing to internal frame buffer", ex); - } - fb.close(); - } - }; - } - - protected boolean isOneway() { - return false; - } - - public void start(I iface, add_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.add_token(args.token_identifier, args.delegation_token,resultHandler); - } - } - - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class remove_token extends org.apache.thrift.AsyncProcessFunction { - public remove_token() { - super("remove_token"); - } - - public remove_token_args getEmptyArgsInstance() { - return new remove_token_args(); - } - - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) 
{ - final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(Boolean o) { - remove_token_result result = new remove_token_result(); - result.success = o; - result.setSuccessIsSet(true); - try { - fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); - return; - } catch (Exception e) { - LOGGER.error("Exception writing to internal frame buffer", e); - } - fb.close(); - } - public void onError(Exception e) { - byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; - org.apache.thrift.TBase msg; - remove_token_result result = new remove_token_result(); - { - msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; - msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); - } - try { - fcall.sendResponse(fb,msg,msgType,seqid); - return; - } catch (Exception ex) { - LOGGER.error("Exception writing to internal frame buffer", ex); - } - fb.close(); - } - }; - } - - protected boolean isOneway() { - return false; - } - - public void start(I iface, remove_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.remove_token(args.token_identifier,resultHandler); - } - } - - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_token extends org.apache.thrift.AsyncProcessFunction { - public get_token() { - super("get_token"); - } - - public get_token_args getEmptyArgsInstance() { - return new get_token_args(); - } - - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { - final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(String o) { - get_token_result result = new get_token_result(); - result.success = o; - try { - fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); - return; - } catch (Exception e) { - LOGGER.error("Exception writing to internal frame buffer", e); - } - fb.close(); - } - public void onError(Exception e) { - byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; - org.apache.thrift.TBase msg; - get_token_result result = new get_token_result(); - { - msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; - msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); - } - try { - fcall.sendResponse(fb,msg,msgType,seqid); - return; - } catch (Exception ex) { - LOGGER.error("Exception writing to internal frame buffer", ex); - } - fb.close(); - } - }; - } - - protected boolean isOneway() { - return false; - } - - public void start(I iface, get_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { - iface.get_token(args.token_identifier,resultHandler); - } - } - - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_all_token_identifiers extends org.apache.thrift.AsyncProcessFunction> { - public get_all_token_identifiers() { - super("get_all_token_identifiers"); - } - - public get_all_token_identifiers_args getEmptyArgsInstance() { - return new get_all_token_identifiers_args(); - } - - public AsyncMethodCallback> getResultHandler(final AsyncFrameBuffer fb, final int seqid) { - final 
org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback>() { - public void onComplete(List o) { - get_all_token_identifiers_result result = new get_all_token_identifiers_result(); - result.success = o; - try { - fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); - return; - } catch (Exception e) { - LOGGER.error("Exception writing to internal frame buffer", e); - } - fb.close(); - } - public void onError(Exception e) { - byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; - org.apache.thrift.TBase msg; - get_all_token_identifiers_result result = new get_all_token_identifiers_result(); - { - msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; - msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); - } - try { - fcall.sendResponse(fb,msg,msgType,seqid); - return; - } catch (Exception ex) { - LOGGER.error("Exception writing to internal frame buffer", ex); - } - fb.close(); - } - }; - } - - protected boolean isOneway() { - return false; - } - - public void start(I iface, get_all_token_identifiers_args args, org.apache.thrift.async.AsyncMethodCallback> resultHandler) throws TException { - iface.get_all_token_identifiers(resultHandler); + public void start(I iface, get_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.get_delegation_token(args.token_owner, args.renewer_kerberos_principal_name,resultHandler); } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_master_key extends org.apache.thrift.AsyncProcessFunction { - public add_master_key() { - super("add_master_key"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class renew_delegation_token extends org.apache.thrift.AsyncProcessFunction { + public renew_delegation_token() { + super("renew_delegation_token"); } - public add_master_key_args getEmptyArgsInstance() { - return new add_master_key_args(); + public renew_delegation_token_args getEmptyArgsInstance() { + return new renew_delegation_token_args(); } - public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { final org.apache.thrift.AsyncProcessFunction fcall = this; - return new AsyncMethodCallback() { - public void onComplete(Integer o) { - add_master_key_result result = new add_master_key_result(); + return new AsyncMethodCallback() { + public void onComplete(Long o) { + renew_delegation_token_result result = new renew_delegation_token_result(); result.success = o; result.setSuccessIsSet(true); try { @@ -27720,7 +27549,327 @@ public void onComplete(Integer o) { public void onError(Exception e) { byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; org.apache.thrift.TBase msg; - add_master_key_result result = new add_master_key_result(); + renew_delegation_token_result result = new renew_delegation_token_result(); + if (e instanceof MetaException) { + result.o1 = (MetaException) e; + result.setO1IsSet(true); + msg = result; + } + else + { + msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, 
e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, renew_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.renew_delegation_token(args.token_str_form,resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class cancel_delegation_token extends org.apache.thrift.AsyncProcessFunction { + public cancel_delegation_token() { + super("cancel_delegation_token"); + } + + public cancel_delegation_token_args getEmptyArgsInstance() { + return new cancel_delegation_token_args(); + } + + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback() { + public void onComplete(Void o) { + cancel_delegation_token_result result = new cancel_delegation_token_result(); + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + cancel_delegation_token_result result = new cancel_delegation_token_result(); + if (e instanceof MetaException) { + result.o1 = (MetaException) e; + result.setO1IsSet(true); + msg = result; + } + else + { + msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, cancel_delegation_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.cancel_delegation_token(args.token_str_form,resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_token extends org.apache.thrift.AsyncProcessFunction { + public add_token() { + super("add_token"); + } + + public add_token_args getEmptyArgsInstance() { + return new add_token_args(); + } + + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback() { + public void onComplete(Boolean o) { + add_token_result result = new add_token_result(); + result.success = o; + result.setSuccessIsSet(true); + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + add_token_result result = new add_token_result(); + { + msgType = 
org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, add_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.add_token(args.token_identifier, args.delegation_token,resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class remove_token extends org.apache.thrift.AsyncProcessFunction { + public remove_token() { + super("remove_token"); + } + + public remove_token_args getEmptyArgsInstance() { + return new remove_token_args(); + } + + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback() { + public void onComplete(Boolean o) { + remove_token_result result = new remove_token_result(); + result.success = o; + result.setSuccessIsSet(true); + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + remove_token_result result = new remove_token_result(); + { + msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, remove_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.remove_token(args.token_identifier,resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_token extends org.apache.thrift.AsyncProcessFunction { + public get_token() { + super("get_token"); + } + + public get_token_args getEmptyArgsInstance() { + return new get_token_args(); + } + + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback() { + public void onComplete(String o) { + get_token_result result = new get_token_result(); + result.success = o; + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + get_token_result result = new get_token_result(); + { + msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = 
(org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, get_token_args args, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws TException { + iface.get_token(args.token_identifier,resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_all_token_identifiers extends org.apache.thrift.AsyncProcessFunction> { + public get_all_token_identifiers() { + super("get_all_token_identifiers"); + } + + public get_all_token_identifiers_args getEmptyArgsInstance() { + return new get_all_token_identifiers_args(); + } + + public AsyncMethodCallback> getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback>() { + public void onComplete(List o) { + get_all_token_identifiers_result result = new get_all_token_identifiers_result(); + result.success = o; + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + get_all_token_identifiers_result result = new get_all_token_identifiers_result(); + { + msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION; + msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage()); + } + try { + fcall.sendResponse(fb,msg,msgType,seqid); + return; + } catch (Exception ex) { + LOGGER.error("Exception writing to internal frame buffer", ex); + } + fb.close(); + } + }; + } + + protected boolean isOneway() { + return false; + } + + public void start(I iface, get_all_token_identifiers_args args, org.apache.thrift.async.AsyncMethodCallback> resultHandler) throws TException { + iface.get_all_token_identifiers(resultHandler); + } + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_master_key extends org.apache.thrift.AsyncProcessFunction { + public add_master_key() { + super("add_master_key"); + } + + public add_master_key_args getEmptyArgsInstance() { + return new add_master_key_args(); + } + + public AsyncMethodCallback getResultHandler(final AsyncFrameBuffer fb, final int seqid) { + final org.apache.thrift.AsyncProcessFunction fcall = this; + return new AsyncMethodCallback() { + public void onComplete(Integer o) { + add_master_key_result result = new add_master_key_result(); + result.success = o; + result.setSuccessIsSet(true); + try { + fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid); + return; + } catch (Exception e) { + LOGGER.error("Exception writing to internal frame buffer", e); + } + fb.close(); + } + public void onError(Exception e) { + byte msgType = org.apache.thrift.protocol.TMessageType.REPLY; + org.apache.thrift.TBase msg; + add_master_key_result result = new add_master_key_result(); 
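For the asynchronous path added at the top of this series (AsyncClient.refresh_privileges and its refresh_privileges_call), a hedged sketch of client-side usage; it assumes the libthrift 0.9.x convention in which the callback receives the completed refresh_privileges_call and unwraps it via getResult(), plus the same endpoint assumptions as the synchronous sketch above.

// Hedged sketch only: async refresh_privileges call. The callback convention and
// metastore endpoint are assumptions, not part of this patch.
import java.util.concurrent.CountDownLatch;

import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
import org.apache.hadoop.hive.metastore.api.GrantRevokeType;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.async.TAsyncClientManager;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TNonblockingSocket;

public class AsyncRefreshPrivilegesSketch {
  public static void main(String[] args) throws Exception {
    ThriftHiveMetastore.AsyncClient client = new ThriftHiveMetastore.AsyncClient(
        new TBinaryProtocol.Factory(), new TAsyncClientManager(),
        new TNonblockingSocket("localhost", 9083));               // assumed metastore endpoint

    HiveObjectRef objToRefresh =
        new HiveObjectRef(HiveObjectType.DATABASE, "default", null, null, null);
    GrantRevokePrivilegeRequest grantRequest = new GrantRevokePrivilegeRequest();
    grantRequest.setRequestType(GrantRevokeType.GRANT);
    grantRequest.setPrivileges(new PrivilegeBag());

    final CountDownLatch done = new CountDownLatch(1);
    client.refresh_privileges(objToRefresh, grantRequest,
        new AsyncMethodCallback<ThriftHiveMetastore.AsyncClient.refresh_privileges_call>() {
          public void onComplete(ThriftHiveMetastore.AsyncClient.refresh_privileges_call call) {
            try {
              // getResult() returns the GrantRevokePrivilegeResponse once RESPONSE_READ.
              System.out.println("refreshed: " + call.getResult());
            } catch (Exception e) {
              e.printStackTrace();
            } finally {
              done.countDown();
            }
          }
          public void onError(Exception e) {
            e.printStackTrace();
            done.countDown();
          }
        });
    done.await();                                                  // wait for the callback to fire
  }
}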
if (e instanceof MetaException) { result.o1 = (MetaException) e; result.setO1IsSet(true); @@ -166054,30 +166203,957 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. @Override public String toString() { - StringBuilder sb = new StringBuilder("list_privileges_args("); + StringBuilder sb = new StringBuilder("list_privileges_args("); + boolean first = true; + + sb.append("principal_name:"); + if (this.principal_name == null) { + sb.append("null"); + } else { + sb.append(this.principal_name); + } + first = false; + if (!first) sb.append(", "); + sb.append("principal_type:"); + if (this.principal_type == null) { + sb.append("null"); + } else { + sb.append(this.principal_type); + } + first = false; + if (!first) sb.append(", "); + sb.append("hiveObject:"); + if (this.hiveObject == null) { + sb.append("null"); + } else { + sb.append(this.hiveObject); + } + first = false; + sb.append(")"); + return sb.toString(); + } + + public void validate() throws org.apache.thrift.TException { + // check for required fields + // check for sub-struct validity + if (hiveObject != null) { + hiveObject.validate(); + } + } + + private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { + try { + write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { + try { + read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private static class list_privileges_argsStandardSchemeFactory implements SchemeFactory { + public list_privileges_argsStandardScheme getScheme() { + return new list_privileges_argsStandardScheme(); + } + } + + private static class list_privileges_argsStandardScheme extends StandardScheme { + + public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_args struct) throws org.apache.thrift.TException { + org.apache.thrift.protocol.TField schemeField; + iprot.readStructBegin(); + while (true) + { + schemeField = iprot.readFieldBegin(); + if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { + break; + } + switch (schemeField.id) { + case 1: // PRINCIPAL_NAME + if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { + struct.principal_name = iprot.readString(); + struct.setPrincipal_nameIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + case 2: // PRINCIPAL_TYPE + if (schemeField.type == org.apache.thrift.protocol.TType.I32) { + struct.principal_type = org.apache.hadoop.hive.metastore.api.PrincipalType.findByValue(iprot.readI32()); + struct.setPrincipal_typeIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + case 3: // HIVE_OBJECT + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.hiveObject = new HiveObjectRef(); + struct.hiveObject.read(iprot); + struct.setHiveObjectIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + default: + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + iprot.readFieldEnd(); + } + iprot.readStructEnd(); + struct.validate(); + } + + public void 
write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_args struct) throws org.apache.thrift.TException { + struct.validate(); + + oprot.writeStructBegin(STRUCT_DESC); + if (struct.principal_name != null) { + oprot.writeFieldBegin(PRINCIPAL_NAME_FIELD_DESC); + oprot.writeString(struct.principal_name); + oprot.writeFieldEnd(); + } + if (struct.principal_type != null) { + oprot.writeFieldBegin(PRINCIPAL_TYPE_FIELD_DESC); + oprot.writeI32(struct.principal_type.getValue()); + oprot.writeFieldEnd(); + } + if (struct.hiveObject != null) { + oprot.writeFieldBegin(HIVE_OBJECT_FIELD_DESC); + struct.hiveObject.write(oprot); + oprot.writeFieldEnd(); + } + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + + } + + private static class list_privileges_argsTupleSchemeFactory implements SchemeFactory { + public list_privileges_argsTupleScheme getScheme() { + return new list_privileges_argsTupleScheme(); + } + } + + private static class list_privileges_argsTupleScheme extends TupleScheme { + + @Override + public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_args struct) throws org.apache.thrift.TException { + TTupleProtocol oprot = (TTupleProtocol) prot; + BitSet optionals = new BitSet(); + if (struct.isSetPrincipal_name()) { + optionals.set(0); + } + if (struct.isSetPrincipal_type()) { + optionals.set(1); + } + if (struct.isSetHiveObject()) { + optionals.set(2); + } + oprot.writeBitSet(optionals, 3); + if (struct.isSetPrincipal_name()) { + oprot.writeString(struct.principal_name); + } + if (struct.isSetPrincipal_type()) { + oprot.writeI32(struct.principal_type.getValue()); + } + if (struct.isSetHiveObject()) { + struct.hiveObject.write(oprot); + } + } + + @Override + public void read(org.apache.thrift.protocol.TProtocol prot, list_privileges_args struct) throws org.apache.thrift.TException { + TTupleProtocol iprot = (TTupleProtocol) prot; + BitSet incoming = iprot.readBitSet(3); + if (incoming.get(0)) { + struct.principal_name = iprot.readString(); + struct.setPrincipal_nameIsSet(true); + } + if (incoming.get(1)) { + struct.principal_type = org.apache.hadoop.hive.metastore.api.PrincipalType.findByValue(iprot.readI32()); + struct.setPrincipal_typeIsSet(true); + } + if (incoming.get(2)) { + struct.hiveObject = new HiveObjectRef(); + struct.hiveObject.read(iprot); + struct.setHiveObjectIsSet(true); + } + } + } + + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class list_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("list_privileges_result"); + + private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); + private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1); + + private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); + static { + schemes.put(StandardScheme.class, new list_privileges_resultStandardSchemeFactory()); + schemes.put(TupleScheme.class, new list_privileges_resultTupleSchemeFactory()); + } + + private List success; // required + private MetaException o1; // required + + /** The set of fields this struct contains, along with convenience methods for finding 
and manipulating them. */ + public enum _Fields implements org.apache.thrift.TFieldIdEnum { + SUCCESS((short)0, "success"), + O1((short)1, "o1"); + + private static final Map byName = new HashMap(); + + static { + for (_Fields field : EnumSet.allOf(_Fields.class)) { + byName.put(field.getFieldName(), field); + } + } + + /** + * Find the _Fields constant that matches fieldId, or null if its not found. + */ + public static _Fields findByThriftId(int fieldId) { + switch(fieldId) { + case 0: // SUCCESS + return SUCCESS; + case 1: // O1 + return O1; + default: + return null; + } + } + + /** + * Find the _Fields constant that matches fieldId, throwing an exception + * if it is not found. + */ + public static _Fields findByThriftIdOrThrow(int fieldId) { + _Fields fields = findByThriftId(fieldId); + if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); + return fields; + } + + /** + * Find the _Fields constant that matches name, or null if its not found. + */ + public static _Fields findByName(String name) { + return byName.get(name); + } + + private final short _thriftId; + private final String _fieldName; + + _Fields(short thriftId, String fieldName) { + _thriftId = thriftId; + _fieldName = fieldName; + } + + public short getThriftFieldId() { + return _thriftId; + } + + public String getFieldName() { + return _fieldName; + } + } + + // isset id assignments + public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; + static { + Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); + tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, + new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HiveObjectPrivilege.class)))); + tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); + metaDataMap = Collections.unmodifiableMap(tmpMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(list_privileges_result.class, metaDataMap); + } + + public list_privileges_result() { + } + + public list_privileges_result( + List success, + MetaException o1) + { + this(); + this.success = success; + this.o1 = o1; + } + + /** + * Performs a deep copy on other. + */ + public list_privileges_result(list_privileges_result other) { + if (other.isSetSuccess()) { + List __this__success = new ArrayList(other.success.size()); + for (HiveObjectPrivilege other_element : other.success) { + __this__success.add(new HiveObjectPrivilege(other_element)); + } + this.success = __this__success; + } + if (other.isSetO1()) { + this.o1 = new MetaException(other.o1); + } + } + + public list_privileges_result deepCopy() { + return new list_privileges_result(this); + } + + @Override + public void clear() { + this.success = null; + this.o1 = null; + } + + public int getSuccessSize() { + return (this.success == null) ? 0 : this.success.size(); + } + + public java.util.Iterator getSuccessIterator() { + return (this.success == null) ? 
null : this.success.iterator(); + } + + public void addToSuccess(HiveObjectPrivilege elem) { + if (this.success == null) { + this.success = new ArrayList(); + } + this.success.add(elem); + } + + public List getSuccess() { + return this.success; + } + + public void setSuccess(List success) { + this.success = success; + } + + public void unsetSuccess() { + this.success = null; + } + + /** Returns true if field success is set (has been assigned a value) and false otherwise */ + public boolean isSetSuccess() { + return this.success != null; + } + + public void setSuccessIsSet(boolean value) { + if (!value) { + this.success = null; + } + } + + public MetaException getO1() { + return this.o1; + } + + public void setO1(MetaException o1) { + this.o1 = o1; + } + + public void unsetO1() { + this.o1 = null; + } + + /** Returns true if field o1 is set (has been assigned a value) and false otherwise */ + public boolean isSetO1() { + return this.o1 != null; + } + + public void setO1IsSet(boolean value) { + if (!value) { + this.o1 = null; + } + } + + public void setFieldValue(_Fields field, Object value) { + switch (field) { + case SUCCESS: + if (value == null) { + unsetSuccess(); + } else { + setSuccess((List)value); + } + break; + + case O1: + if (value == null) { + unsetO1(); + } else { + setO1((MetaException)value); + } + break; + + } + } + + public Object getFieldValue(_Fields field) { + switch (field) { + case SUCCESS: + return getSuccess(); + + case O1: + return getO1(); + + } + throw new IllegalStateException(); + } + + /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ + public boolean isSet(_Fields field) { + if (field == null) { + throw new IllegalArgumentException(); + } + + switch (field) { + case SUCCESS: + return isSetSuccess(); + case O1: + return isSetO1(); + } + throw new IllegalStateException(); + } + + @Override + public boolean equals(Object that) { + if (that == null) + return false; + if (that instanceof list_privileges_result) + return this.equals((list_privileges_result)that); + return false; + } + + public boolean equals(list_privileges_result that) { + if (that == null) + return false; + + boolean this_present_success = true && this.isSetSuccess(); + boolean that_present_success = true && that.isSetSuccess(); + if (this_present_success || that_present_success) { + if (!(this_present_success && that_present_success)) + return false; + if (!this.success.equals(that.success)) + return false; + } + + boolean this_present_o1 = true && this.isSetO1(); + boolean that_present_o1 = true && that.isSetO1(); + if (this_present_o1 || that_present_o1) { + if (!(this_present_o1 && that_present_o1)) + return false; + if (!this.o1.equals(that.o1)) + return false; + } + + return true; + } + + @Override + public int hashCode() { + List list = new ArrayList(); + + boolean present_success = true && (isSetSuccess()); + list.add(present_success); + if (present_success) + list.add(success); + + boolean present_o1 = true && (isSetO1()); + list.add(present_o1); + if (present_o1) + list.add(o1); + + return list.hashCode(); + } + + @Override + public int compareTo(list_privileges_result other) { + if (!getClass().equals(other.getClass())) { + return getClass().getName().compareTo(other.getClass().getName()); + } + + int lastComparison = 0; + + lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetSuccess()) { + lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.success, other.success); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetO1()).compareTo(other.isSetO1()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetO1()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o1, other.o1); + if (lastComparison != 0) { + return lastComparison; + } + } + return 0; + } + + public _Fields fieldForId(int fieldId) { + return _Fields.findByThriftId(fieldId); + } + + public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { + schemes.get(iprot.getScheme()).getScheme().read(iprot, this); + } + + public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { + schemes.get(oprot.getScheme()).getScheme().write(oprot, this); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("list_privileges_result("); + boolean first = true; + + sb.append("success:"); + if (this.success == null) { + sb.append("null"); + } else { + sb.append(this.success); + } + first = false; + if (!first) sb.append(", "); + sb.append("o1:"); + if (this.o1 == null) { + sb.append("null"); + } else { + sb.append(this.o1); + } + first = false; + sb.append(")"); + return sb.toString(); + } + + public void validate() throws org.apache.thrift.TException { + // check for required fields + // check for sub-struct validity + } + + private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { + try { + write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { + try { + read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private static class list_privileges_resultStandardSchemeFactory implements SchemeFactory { + public list_privileges_resultStandardScheme getScheme() { + return new list_privileges_resultStandardScheme(); + } + } + + private static class list_privileges_resultStandardScheme extends StandardScheme { + + public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_result struct) throws org.apache.thrift.TException { + org.apache.thrift.protocol.TField schemeField; + iprot.readStructBegin(); + while (true) + { + schemeField = iprot.readFieldBegin(); + if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { + break; + } + switch (schemeField.id) { + case 0: // SUCCESS + if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { + { + org.apache.thrift.protocol.TList _list1446 = iprot.readListBegin(); + struct.success = new ArrayList(_list1446.size); + HiveObjectPrivilege _elem1447; + for (int _i1448 = 0; _i1448 < _list1446.size; ++_i1448) + { + _elem1447 = new HiveObjectPrivilege(); + _elem1447.read(iprot); + struct.success.add(_elem1447); + } + iprot.readListEnd(); + } + struct.setSuccessIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + case 1: // O1 + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.o1 = new MetaException(); + struct.o1.read(iprot); + struct.setO1IsSet(true); + } else { + 
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + default: + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + iprot.readFieldEnd(); + } + iprot.readStructEnd(); + struct.validate(); + } + + public void write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_result struct) throws org.apache.thrift.TException { + struct.validate(); + + oprot.writeStructBegin(STRUCT_DESC); + if (struct.success != null) { + oprot.writeFieldBegin(SUCCESS_FIELD_DESC); + { + oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size())); + for (HiveObjectPrivilege _iter1449 : struct.success) + { + _iter1449.write(oprot); + } + oprot.writeListEnd(); + } + oprot.writeFieldEnd(); + } + if (struct.o1 != null) { + oprot.writeFieldBegin(O1_FIELD_DESC); + struct.o1.write(oprot); + oprot.writeFieldEnd(); + } + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + + } + + private static class list_privileges_resultTupleSchemeFactory implements SchemeFactory { + public list_privileges_resultTupleScheme getScheme() { + return new list_privileges_resultTupleScheme(); + } + } + + private static class list_privileges_resultTupleScheme extends TupleScheme { + + @Override + public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_result struct) throws org.apache.thrift.TException { + TTupleProtocol oprot = (TTupleProtocol) prot; + BitSet optionals = new BitSet(); + if (struct.isSetSuccess()) { + optionals.set(0); + } + if (struct.isSetO1()) { + optionals.set(1); + } + oprot.writeBitSet(optionals, 2); + if (struct.isSetSuccess()) { + { + oprot.writeI32(struct.success.size()); + for (HiveObjectPrivilege _iter1450 : struct.success) + { + _iter1450.write(oprot); + } + } + } + if (struct.isSetO1()) { + struct.o1.write(oprot); + } + } + + @Override + public void read(org.apache.thrift.protocol.TProtocol prot, list_privileges_result struct) throws org.apache.thrift.TException { + TTupleProtocol iprot = (TTupleProtocol) prot; + BitSet incoming = iprot.readBitSet(2); + if (incoming.get(0)) { + { + org.apache.thrift.protocol.TList _list1451 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); + struct.success = new ArrayList(_list1451.size); + HiveObjectPrivilege _elem1452; + for (int _i1453 = 0; _i1453 < _list1451.size; ++_i1453) + { + _elem1452 = new HiveObjectPrivilege(); + _elem1452.read(iprot); + struct.success.add(_elem1452); + } + } + struct.setSuccessIsSet(true); + } + if (incoming.get(1)) { + struct.o1 = new MetaException(); + struct.o1.read(iprot); + struct.setO1IsSet(true); + } + } + } + + } + + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_privileges_args"); + + private static final org.apache.thrift.protocol.TField PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("privileges", org.apache.thrift.protocol.TType.STRUCT, (short)1); + + private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); + static { + schemes.put(StandardScheme.class, new grant_privileges_argsStandardSchemeFactory()); + schemes.put(TupleScheme.class, new grant_privileges_argsTupleSchemeFactory()); + } + + private 
PrivilegeBag privileges; // required + + /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ + public enum _Fields implements org.apache.thrift.TFieldIdEnum { + PRIVILEGES((short)1, "privileges"); + + private static final Map byName = new HashMap(); + + static { + for (_Fields field : EnumSet.allOf(_Fields.class)) { + byName.put(field.getFieldName(), field); + } + } + + /** + * Find the _Fields constant that matches fieldId, or null if its not found. + */ + public static _Fields findByThriftId(int fieldId) { + switch(fieldId) { + case 1: // PRIVILEGES + return PRIVILEGES; + default: + return null; + } + } + + /** + * Find the _Fields constant that matches fieldId, throwing an exception + * if it is not found. + */ + public static _Fields findByThriftIdOrThrow(int fieldId) { + _Fields fields = findByThriftId(fieldId); + if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); + return fields; + } + + /** + * Find the _Fields constant that matches name, or null if its not found. + */ + public static _Fields findByName(String name) { + return byName.get(name); + } + + private final short _thriftId; + private final String _fieldName; + + _Fields(short thriftId, String fieldName) { + _thriftId = thriftId; + _fieldName = fieldName; + } + + public short getThriftFieldId() { + return _thriftId; + } + + public String getFieldName() { + return _fieldName; + } + } + + // isset id assignments + public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; + static { + Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); + tmpMap.put(_Fields.PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("privileges", org.apache.thrift.TFieldRequirementType.DEFAULT, + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeBag.class))); + metaDataMap = Collections.unmodifiableMap(tmpMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_privileges_args.class, metaDataMap); + } + + public grant_privileges_args() { + } + + public grant_privileges_args( + PrivilegeBag privileges) + { + this(); + this.privileges = privileges; + } + + /** + * Performs a deep copy on other. 
+ */ + public grant_privileges_args(grant_privileges_args other) { + if (other.isSetPrivileges()) { + this.privileges = new PrivilegeBag(other.privileges); + } + } + + public grant_privileges_args deepCopy() { + return new grant_privileges_args(this); + } + + @Override + public void clear() { + this.privileges = null; + } + + public PrivilegeBag getPrivileges() { + return this.privileges; + } + + public void setPrivileges(PrivilegeBag privileges) { + this.privileges = privileges; + } + + public void unsetPrivileges() { + this.privileges = null; + } + + /** Returns true if field privileges is set (has been assigned a value) and false otherwise */ + public boolean isSetPrivileges() { + return this.privileges != null; + } + + public void setPrivilegesIsSet(boolean value) { + if (!value) { + this.privileges = null; + } + } + + public void setFieldValue(_Fields field, Object value) { + switch (field) { + case PRIVILEGES: + if (value == null) { + unsetPrivileges(); + } else { + setPrivileges((PrivilegeBag)value); + } + break; + + } + } + + public Object getFieldValue(_Fields field) { + switch (field) { + case PRIVILEGES: + return getPrivileges(); + + } + throw new IllegalStateException(); + } + + /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ + public boolean isSet(_Fields field) { + if (field == null) { + throw new IllegalArgumentException(); + } + + switch (field) { + case PRIVILEGES: + return isSetPrivileges(); + } + throw new IllegalStateException(); + } + + @Override + public boolean equals(Object that) { + if (that == null) + return false; + if (that instanceof grant_privileges_args) + return this.equals((grant_privileges_args)that); + return false; + } + + public boolean equals(grant_privileges_args that) { + if (that == null) + return false; + + boolean this_present_privileges = true && this.isSetPrivileges(); + boolean that_present_privileges = true && that.isSetPrivileges(); + if (this_present_privileges || that_present_privileges) { + if (!(this_present_privileges && that_present_privileges)) + return false; + if (!this.privileges.equals(that.privileges)) + return false; + } + + return true; + } + + @Override + public int hashCode() { + List list = new ArrayList(); + + boolean present_privileges = true && (isSetPrivileges()); + list.add(present_privileges); + if (present_privileges) + list.add(privileges); + + return list.hashCode(); + } + + @Override + public int compareTo(grant_privileges_args other) { + if (!getClass().equals(other.getClass())) { + return getClass().getName().compareTo(other.getClass().getName()); + } + + int lastComparison = 0; + + lastComparison = Boolean.valueOf(isSetPrivileges()).compareTo(other.isSetPrivileges()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetPrivileges()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.privileges, other.privileges); + if (lastComparison != 0) { + return lastComparison; + } + } + return 0; + } + + public _Fields fieldForId(int fieldId) { + return _Fields.findByThriftId(fieldId); + } + + public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { + schemes.get(iprot.getScheme()).getScheme().read(iprot, this); + } + + public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { + schemes.get(oprot.getScheme()).getScheme().write(oprot, this); + } + + @Override + public String toString() { + StringBuilder sb = new 
StringBuilder("grant_privileges_args("); boolean first = true; - sb.append("principal_name:"); - if (this.principal_name == null) { - sb.append("null"); - } else { - sb.append(this.principal_name); - } - first = false; - if (!first) sb.append(", "); - sb.append("principal_type:"); - if (this.principal_type == null) { - sb.append("null"); - } else { - sb.append(this.principal_type); - } - first = false; - if (!first) sb.append(", "); - sb.append("hiveObject:"); - if (this.hiveObject == null) { + sb.append("privileges:"); + if (this.privileges == null) { sb.append("null"); } else { - sb.append(this.hiveObject); + sb.append(this.privileges); } first = false; sb.append(")"); @@ -166087,8 +167163,8 @@ public String toString() { public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity - if (hiveObject != null) { - hiveObject.validate(); + if (privileges != null) { + privileges.validate(); } } @@ -166108,15 +167184,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class list_privileges_argsStandardSchemeFactory implements SchemeFactory { - public list_privileges_argsStandardScheme getScheme() { - return new list_privileges_argsStandardScheme(); + private static class grant_privileges_argsStandardSchemeFactory implements SchemeFactory { + public grant_privileges_argsStandardScheme getScheme() { + return new grant_privileges_argsStandardScheme(); } } - private static class list_privileges_argsStandardScheme extends StandardScheme { + private static class grant_privileges_argsStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -166126,27 +167202,11 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_arg break; } switch (schemeField.id) { - case 1: // PRINCIPAL_NAME - if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { - struct.principal_name = iprot.readString(); - struct.setPrincipal_nameIsSet(true); - } else { - org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); - } - break; - case 2: // PRINCIPAL_TYPE - if (schemeField.type == org.apache.thrift.protocol.TType.I32) { - struct.principal_type = org.apache.hadoop.hive.metastore.api.PrincipalType.findByValue(iprot.readI32()); - struct.setPrincipal_typeIsSet(true); - } else { - org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); - } - break; - case 3: // HIVE_OBJECT + case 1: // PRIVILEGES if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { - struct.hiveObject = new HiveObjectRef(); - struct.hiveObject.read(iprot); - struct.setHiveObjectIsSet(true); + struct.privileges = new PrivilegeBag(); + struct.privileges.read(iprot); + struct.setPrivilegesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -166160,23 +167220,13 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_arg struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_args struct) throws 
org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); - if (struct.principal_name != null) { - oprot.writeFieldBegin(PRINCIPAL_NAME_FIELD_DESC); - oprot.writeString(struct.principal_name); - oprot.writeFieldEnd(); - } - if (struct.principal_type != null) { - oprot.writeFieldBegin(PRINCIPAL_TYPE_FIELD_DESC); - oprot.writeI32(struct.principal_type.getValue()); - oprot.writeFieldEnd(); - } - if (struct.hiveObject != null) { - oprot.writeFieldBegin(HIVE_OBJECT_FIELD_DESC); - struct.hiveObject.write(oprot); + if (struct.privileges != null) { + oprot.writeFieldBegin(PRIVILEGES_FIELD_DESC); + struct.privileges.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); @@ -166185,74 +167235,54 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_ar } - private static class list_privileges_argsTupleSchemeFactory implements SchemeFactory { - public list_privileges_argsTupleScheme getScheme() { - return new list_privileges_argsTupleScheme(); + private static class grant_privileges_argsTupleSchemeFactory implements SchemeFactory { + public grant_privileges_argsTupleScheme getScheme() { + return new grant_privileges_argsTupleScheme(); } } - private static class list_privileges_argsTupleScheme extends TupleScheme { + private static class grant_privileges_argsTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); - if (struct.isSetPrincipal_name()) { + if (struct.isSetPrivileges()) { optionals.set(0); } - if (struct.isSetPrincipal_type()) { - optionals.set(1); - } - if (struct.isSetHiveObject()) { - optionals.set(2); - } - oprot.writeBitSet(optionals, 3); - if (struct.isSetPrincipal_name()) { - oprot.writeString(struct.principal_name); - } - if (struct.isSetPrincipal_type()) { - oprot.writeI32(struct.principal_type.getValue()); - } - if (struct.isSetHiveObject()) { - struct.hiveObject.write(oprot); + oprot.writeBitSet(optionals, 1); + if (struct.isSetPrivileges()) { + struct.privileges.write(oprot); } } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, list_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(3); + BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { - struct.principal_name = iprot.readString(); - struct.setPrincipal_nameIsSet(true); - } - if (incoming.get(1)) { - struct.principal_type = org.apache.hadoop.hive.metastore.api.PrincipalType.findByValue(iprot.readI32()); - struct.setPrincipal_typeIsSet(true); - } - if (incoming.get(2)) { - struct.hiveObject = new HiveObjectRef(); - struct.hiveObject.read(iprot); - struct.setHiveObjectIsSet(true); + struct.privileges = new PrivilegeBag(); + struct.privileges.read(iprot); + struct.setPrivilegesIsSet(true); } } } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class list_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final 
org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("list_privileges_result"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_privileges_result"); - private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0); + private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0); private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new list_privileges_resultStandardSchemeFactory()); - schemes.put(TupleScheme.class, new list_privileges_resultTupleSchemeFactory()); + schemes.put(StandardScheme.class, new grant_privileges_resultStandardSchemeFactory()); + schemes.put(TupleScheme.class, new grant_privileges_resultTupleSchemeFactory()); } - private List success; // required + private boolean success; // required private MetaException o1; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ @@ -166317,92 +167347,74 @@ public String getFieldName() { } // isset id assignments + private static final int __SUCCESS_ISSET_ID = 0; + private byte __isset_bitfield = 0; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, - new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, - new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HiveObjectPrivilege.class)))); + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(list_privileges_result.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_privileges_result.class, metaDataMap); } - public list_privileges_result() { + public grant_privileges_result() { } - public list_privileges_result( - List success, + public grant_privileges_result( + boolean success, MetaException o1) { this(); this.success = success; + setSuccessIsSet(true); this.o1 = o1; } /** * Performs a deep copy on other. 
*/ - public list_privileges_result(list_privileges_result other) { - if (other.isSetSuccess()) { - List __this__success = new ArrayList(other.success.size()); - for (HiveObjectPrivilege other_element : other.success) { - __this__success.add(new HiveObjectPrivilege(other_element)); - } - this.success = __this__success; - } + public grant_privileges_result(grant_privileges_result other) { + __isset_bitfield = other.__isset_bitfield; + this.success = other.success; if (other.isSetO1()) { this.o1 = new MetaException(other.o1); } } - public list_privileges_result deepCopy() { - return new list_privileges_result(this); + public grant_privileges_result deepCopy() { + return new grant_privileges_result(this); } @Override public void clear() { - this.success = null; + setSuccessIsSet(false); + this.success = false; this.o1 = null; } - public int getSuccessSize() { - return (this.success == null) ? 0 : this.success.size(); - } - - public java.util.Iterator getSuccessIterator() { - return (this.success == null) ? null : this.success.iterator(); - } - - public void addToSuccess(HiveObjectPrivilege elem) { - if (this.success == null) { - this.success = new ArrayList(); - } - this.success.add(elem); - } - - public List getSuccess() { + public boolean isSuccess() { return this.success; } - public void setSuccess(List success) { + public void setSuccess(boolean success) { this.success = success; + setSuccessIsSet(true); } public void unsetSuccess() { - this.success = null; + __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID); } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean isSetSuccess() { - return this.success != null; + return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID); } public void setSuccessIsSet(boolean value) { - if (!value) { - this.success = null; - } + __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value); } public MetaException getO1() { @@ -166434,7 +167446,7 @@ public void setFieldValue(_Fields field, Object value) { if (value == null) { unsetSuccess(); } else { - setSuccess((List)value); + setSuccess((Boolean)value); } break; @@ -166452,7 +167464,7 @@ public void setFieldValue(_Fields field, Object value) { public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: - return getSuccess(); + return isSuccess(); case O1: return getO1(); @@ -166480,21 +167492,21 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof list_privileges_result) - return this.equals((list_privileges_result)that); + if (that instanceof grant_privileges_result) + return this.equals((grant_privileges_result)that); return false; } - public boolean equals(list_privileges_result that) { + public boolean equals(grant_privileges_result that) { if (that == null) return false; - boolean this_present_success = true && this.isSetSuccess(); - boolean that_present_success = true && that.isSetSuccess(); + boolean this_present_success = true; + boolean that_present_success = true; if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; - if (!this.success.equals(that.success)) + if (this.success != that.success) return false; } @@ -166514,7 +167526,7 @@ public boolean equals(list_privileges_result that) { public int hashCode() { List list = new ArrayList(); - boolean present_success = true && (isSetSuccess()); + boolean present_success = true; 
list.add(present_success); if (present_success) list.add(success); @@ -166528,7 +167540,7 @@ public int hashCode() { } @Override - public int compareTo(list_privileges_result other) { + public int compareTo(grant_privileges_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } @@ -166572,15 +167584,11 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. @Override public String toString() { - StringBuilder sb = new StringBuilder("list_privileges_result("); + StringBuilder sb = new StringBuilder("grant_privileges_result("); boolean first = true; sb.append("success:"); - if (this.success == null) { - sb.append("null"); - } else { - sb.append(this.success); - } + sb.append(this.success); first = false; if (!first) sb.append(", "); sb.append("o1:"); @@ -166609,21 +167617,23 @@ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOExcept private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { + // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. + __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } - private static class list_privileges_resultStandardSchemeFactory implements SchemeFactory { - public list_privileges_resultStandardScheme getScheme() { - return new list_privileges_resultStandardScheme(); + private static class grant_privileges_resultStandardSchemeFactory implements SchemeFactory { + public grant_privileges_resultStandardScheme getScheme() { + return new grant_privileges_resultStandardScheme(); } } - private static class list_privileges_resultStandardScheme extends StandardScheme { + private static class grant_privileges_resultStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -166634,19 +167644,8 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_res } switch (schemeField.id) { case 0: // SUCCESS - if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { - { - org.apache.thrift.protocol.TList _list1446 = iprot.readListBegin(); - struct.success = new ArrayList(_list1446.size); - HiveObjectPrivilege _elem1447; - for (int _i1448 = 0; _i1448 < _list1446.size; ++_i1448) - { - _elem1447 = new HiveObjectPrivilege(); - _elem1447.read(iprot); - struct.success.add(_elem1447); - } - iprot.readListEnd(); - } + if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { + struct.success = iprot.readBool(); struct.setSuccessIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); @@ -166670,20 +167669,13 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, list_privileges_res struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_result struct) throws org.apache.thrift.TException { 
struct.validate(); oprot.writeStructBegin(STRUCT_DESC); - if (struct.success != null) { + if (struct.isSetSuccess()) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); - { - oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size())); - for (HiveObjectPrivilege _iter1449 : struct.success) - { - _iter1449.write(oprot); - } - oprot.writeListEnd(); - } + oprot.writeBool(struct.success); oprot.writeFieldEnd(); } if (struct.o1 != null) { @@ -166697,16 +167689,16 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, list_privileges_re } - private static class list_privileges_resultTupleSchemeFactory implements SchemeFactory { - public list_privileges_resultTupleScheme getScheme() { - return new list_privileges_resultTupleScheme(); + private static class grant_privileges_resultTupleSchemeFactory implements SchemeFactory { + public grant_privileges_resultTupleScheme getScheme() { + return new grant_privileges_resultTupleScheme(); } } - private static class list_privileges_resultTupleScheme extends TupleScheme { + private static class grant_privileges_resultTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetSuccess()) { @@ -166717,13 +167709,7 @@ public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_res } oprot.writeBitSet(optionals, 2); if (struct.isSetSuccess()) { - { - oprot.writeI32(struct.success.size()); - for (HiveObjectPrivilege _iter1450 : struct.success) - { - _iter1450.write(oprot); - } - } + oprot.writeBool(struct.success); } if (struct.isSetO1()) { struct.o1.write(oprot); @@ -166731,21 +167717,11 @@ public void write(org.apache.thrift.protocol.TProtocol prot, list_privileges_res } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, list_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { - { - org.apache.thrift.protocol.TList _list1451 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); - struct.success = new ArrayList(_list1451.size); - HiveObjectPrivilege _elem1452; - for (int _i1453 = 0; _i1453 < _list1451.size; ++_i1453) - { - _elem1452 = new HiveObjectPrivilege(); - _elem1452.read(iprot); - struct.success.add(_elem1452); - } - } + struct.success = iprot.readBool(); struct.setSuccessIsSet(true); } if (incoming.get(1)) { @@ -166758,15 +167734,15 @@ public void read(org.apache.thrift.protocol.TProtocol prot, list_privileges_resu } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_privileges_args"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public 
static class revoke_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("revoke_privileges_args"); private static final org.apache.thrift.protocol.TField PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("privileges", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new grant_privileges_argsStandardSchemeFactory()); - schemes.put(TupleScheme.class, new grant_privileges_argsTupleSchemeFactory()); + schemes.put(StandardScheme.class, new revoke_privileges_argsStandardSchemeFactory()); + schemes.put(TupleScheme.class, new revoke_privileges_argsTupleSchemeFactory()); } private PrivilegeBag privileges; // required @@ -166836,13 +167812,13 @@ public String getFieldName() { tmpMap.put(_Fields.PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("privileges", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeBag.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_privileges_args.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(revoke_privileges_args.class, metaDataMap); } - public grant_privileges_args() { + public revoke_privileges_args() { } - public grant_privileges_args( + public revoke_privileges_args( PrivilegeBag privileges) { this(); @@ -166852,14 +167828,14 @@ public grant_privileges_args( /** * Performs a deep copy on other. */ - public grant_privileges_args(grant_privileges_args other) { + public revoke_privileges_args(revoke_privileges_args other) { if (other.isSetPrivileges()) { this.privileges = new PrivilegeBag(other.privileges); } } - public grant_privileges_args deepCopy() { - return new grant_privileges_args(this); + public revoke_privileges_args deepCopy() { + return new revoke_privileges_args(this); } @Override @@ -166929,12 +167905,12 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof grant_privileges_args) - return this.equals((grant_privileges_args)that); + if (that instanceof revoke_privileges_args) + return this.equals((revoke_privileges_args)that); return false; } - public boolean equals(grant_privileges_args that) { + public boolean equals(revoke_privileges_args that) { if (that == null) return false; @@ -166963,7 +167939,7 @@ public int hashCode() { } @Override - public int compareTo(grant_privileges_args other) { + public int compareTo(revoke_privileges_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } @@ -166997,7 +167973,7 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. 
@Override public String toString() { - StringBuilder sb = new StringBuilder("grant_privileges_args("); + StringBuilder sb = new StringBuilder("revoke_privileges_args("); boolean first = true; sb.append("privileges:"); @@ -167035,15 +168011,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class grant_privileges_argsStandardSchemeFactory implements SchemeFactory { - public grant_privileges_argsStandardScheme getScheme() { - return new grant_privileges_argsStandardScheme(); + private static class revoke_privileges_argsStandardSchemeFactory implements SchemeFactory { + public revoke_privileges_argsStandardScheme getScheme() { + return new revoke_privileges_argsStandardScheme(); } } - private static class grant_privileges_argsStandardScheme extends StandardScheme { + private static class revoke_privileges_argsStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -167071,7 +168047,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_ar struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); @@ -167086,16 +168062,16 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_a } - private static class grant_privileges_argsTupleSchemeFactory implements SchemeFactory { - public grant_privileges_argsTupleScheme getScheme() { - return new grant_privileges_argsTupleScheme(); + private static class revoke_privileges_argsTupleSchemeFactory implements SchemeFactory { + public revoke_privileges_argsTupleScheme getScheme() { + return new revoke_privileges_argsTupleScheme(); } } - private static class grant_privileges_argsTupleScheme extends TupleScheme { + private static class revoke_privileges_argsTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetPrivileges()) { @@ -167108,7 +168084,7 @@ public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_ar } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { @@ -167121,16 +168097,16 @@ public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_arg } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_privileges_result implements 
org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_privileges_result"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class revoke_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("revoke_privileges_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0); private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new grant_privileges_resultStandardSchemeFactory()); - schemes.put(TupleScheme.class, new grant_privileges_resultTupleSchemeFactory()); + schemes.put(StandardScheme.class, new revoke_privileges_resultStandardSchemeFactory()); + schemes.put(TupleScheme.class, new revoke_privileges_resultTupleSchemeFactory()); } private boolean success; // required @@ -167208,13 +168184,13 @@ public String getFieldName() { tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_privileges_result.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(revoke_privileges_result.class, metaDataMap); } - public grant_privileges_result() { + public revoke_privileges_result() { } - public grant_privileges_result( + public revoke_privileges_result( boolean success, MetaException o1) { @@ -167227,7 +168203,7 @@ public grant_privileges_result( /** * Performs a deep copy on other. 
*/ - public grant_privileges_result(grant_privileges_result other) { + public revoke_privileges_result(revoke_privileges_result other) { __isset_bitfield = other.__isset_bitfield; this.success = other.success; if (other.isSetO1()) { @@ -167235,8 +168211,8 @@ public grant_privileges_result(grant_privileges_result other) { } } - public grant_privileges_result deepCopy() { - return new grant_privileges_result(this); + public revoke_privileges_result deepCopy() { + return new revoke_privileges_result(this); } @Override @@ -167343,12 +168319,12 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof grant_privileges_result) - return this.equals((grant_privileges_result)that); + if (that instanceof revoke_privileges_result) + return this.equals((revoke_privileges_result)that); return false; } - public boolean equals(grant_privileges_result that) { + public boolean equals(revoke_privileges_result that) { if (that == null) return false; @@ -167391,7 +168367,7 @@ public int hashCode() { } @Override - public int compareTo(grant_privileges_result other) { + public int compareTo(revoke_privileges_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } @@ -167435,7 +168411,7 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. @Override public String toString() { - StringBuilder sb = new StringBuilder("grant_privileges_result("); + StringBuilder sb = new StringBuilder("revoke_privileges_result("); boolean first = true; sb.append("success:"); @@ -167476,15 +168452,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class grant_privileges_resultStandardSchemeFactory implements SchemeFactory { - public grant_privileges_resultStandardScheme getScheme() { - return new grant_privileges_resultStandardScheme(); + private static class revoke_privileges_resultStandardSchemeFactory implements SchemeFactory { + public revoke_privileges_resultStandardScheme getScheme() { + return new revoke_privileges_resultStandardScheme(); } } - private static class grant_privileges_resultStandardScheme extends StandardScheme { + private static class revoke_privileges_resultStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -167520,7 +168496,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, grant_privileges_re struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); @@ -167540,16 +168516,16 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, grant_privileges_r } - private static class grant_privileges_resultTupleSchemeFactory implements SchemeFactory { - public grant_privileges_resultTupleScheme getScheme() { - return new grant_privileges_resultTupleScheme(); + private static class revoke_privileges_resultTupleSchemeFactory implements SchemeFactory 
{ + public revoke_privileges_resultTupleScheme getScheme() { + return new revoke_privileges_resultTupleScheme(); } } - private static class grant_privileges_resultTupleScheme extends TupleScheme { + private static class revoke_privileges_resultTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetSuccess()) { @@ -167568,7 +168544,7 @@ public void write(org.apache.thrift.protocol.TProtocol prot, grant_privileges_re } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { @@ -167585,22 +168561,22 @@ public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_res } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class revoke_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("revoke_privileges_args"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_revoke_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_revoke_privileges_args"); - private static final org.apache.thrift.protocol.TField PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("privileges", org.apache.thrift.protocol.TType.STRUCT, (short)1); + private static final org.apache.thrift.protocol.TField REQUEST_FIELD_DESC = new org.apache.thrift.protocol.TField("request", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new revoke_privileges_argsStandardSchemeFactory()); - schemes.put(TupleScheme.class, new revoke_privileges_argsTupleSchemeFactory()); + schemes.put(StandardScheme.class, new grant_revoke_privileges_argsStandardSchemeFactory()); + schemes.put(TupleScheme.class, new grant_revoke_privileges_argsTupleSchemeFactory()); } - private PrivilegeBag privileges; // required + private GrantRevokePrivilegeRequest request; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { - PRIVILEGES((short)1, "privileges"); + REQUEST((short)1, "request"); private static final Map byName = new HashMap(); @@ -167615,8 +168591,8 @@ public void read(org.apache.thrift.protocol.TProtocol prot, grant_privileges_res */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { - case 1: // PRIVILEGES - return PRIVILEGES; + case 1: // REQUEST + return REQUEST; default: return null; } @@ -167660,70 +168636,70 @@ public String getFieldName() { public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("privileges", org.apache.thrift.TFieldRequirementType.DEFAULT, - new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeBag.class))); + tmpMap.put(_Fields.REQUEST, new org.apache.thrift.meta_data.FieldMetaData("request", org.apache.thrift.TFieldRequirementType.DEFAULT, + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GrantRevokePrivilegeRequest.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(revoke_privileges_args.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_revoke_privileges_args.class, metaDataMap); } - public revoke_privileges_args() { + public grant_revoke_privileges_args() { } - public revoke_privileges_args( - PrivilegeBag privileges) + public grant_revoke_privileges_args( + GrantRevokePrivilegeRequest request) { this(); - this.privileges = privileges; + this.request = request; } /** * Performs a deep copy on other. 
*/ - public revoke_privileges_args(revoke_privileges_args other) { - if (other.isSetPrivileges()) { - this.privileges = new PrivilegeBag(other.privileges); + public grant_revoke_privileges_args(grant_revoke_privileges_args other) { + if (other.isSetRequest()) { + this.request = new GrantRevokePrivilegeRequest(other.request); } } - public revoke_privileges_args deepCopy() { - return new revoke_privileges_args(this); + public grant_revoke_privileges_args deepCopy() { + return new grant_revoke_privileges_args(this); } @Override public void clear() { - this.privileges = null; + this.request = null; } - public PrivilegeBag getPrivileges() { - return this.privileges; + public GrantRevokePrivilegeRequest getRequest() { + return this.request; } - public void setPrivileges(PrivilegeBag privileges) { - this.privileges = privileges; + public void setRequest(GrantRevokePrivilegeRequest request) { + this.request = request; } - public void unsetPrivileges() { - this.privileges = null; + public void unsetRequest() { + this.request = null; } - /** Returns true if field privileges is set (has been assigned a value) and false otherwise */ - public boolean isSetPrivileges() { - return this.privileges != null; + /** Returns true if field request is set (has been assigned a value) and false otherwise */ + public boolean isSetRequest() { + return this.request != null; } - public void setPrivilegesIsSet(boolean value) { + public void setRequestIsSet(boolean value) { if (!value) { - this.privileges = null; + this.request = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { - case PRIVILEGES: + case REQUEST: if (value == null) { - unsetPrivileges(); + unsetRequest(); } else { - setPrivileges((PrivilegeBag)value); + setRequest((GrantRevokePrivilegeRequest)value); } break; @@ -167732,8 +168708,8 @@ public void setFieldValue(_Fields field, Object value) { public Object getFieldValue(_Fields field) { switch (field) { - case PRIVILEGES: - return getPrivileges(); + case REQUEST: + return getRequest(); } throw new IllegalStateException(); @@ -167746,8 +168722,8 @@ public boolean isSet(_Fields field) { } switch (field) { - case PRIVILEGES: - return isSetPrivileges(); + case REQUEST: + return isSetRequest(); } throw new IllegalStateException(); } @@ -167756,21 +168732,21 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof revoke_privileges_args) - return this.equals((revoke_privileges_args)that); + if (that instanceof grant_revoke_privileges_args) + return this.equals((grant_revoke_privileges_args)that); return false; } - public boolean equals(revoke_privileges_args that) { + public boolean equals(grant_revoke_privileges_args that) { if (that == null) return false; - boolean this_present_privileges = true && this.isSetPrivileges(); - boolean that_present_privileges = true && that.isSetPrivileges(); - if (this_present_privileges || that_present_privileges) { - if (!(this_present_privileges && that_present_privileges)) + boolean this_present_request = true && this.isSetRequest(); + boolean that_present_request = true && that.isSetRequest(); + if (this_present_request || that_present_request) { + if (!(this_present_request && that_present_request)) return false; - if (!this.privileges.equals(that.privileges)) + if (!this.request.equals(that.request)) return false; } @@ -167781,28 +168757,28 @@ public boolean equals(revoke_privileges_args that) { public int hashCode() { List list = new ArrayList(); - boolean 
present_privileges = true && (isSetPrivileges()); - list.add(present_privileges); - if (present_privileges) - list.add(privileges); + boolean present_request = true && (isSetRequest()); + list.add(present_request); + if (present_request) + list.add(request); return list.hashCode(); } @Override - public int compareTo(revoke_privileges_args other) { + public int compareTo(grant_revoke_privileges_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; - lastComparison = Boolean.valueOf(isSetPrivileges()).compareTo(other.isSetPrivileges()); + lastComparison = Boolean.valueOf(isSetRequest()).compareTo(other.isSetRequest()); if (lastComparison != 0) { return lastComparison; } - if (isSetPrivileges()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.privileges, other.privileges); + if (isSetRequest()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.request, other.request); if (lastComparison != 0) { return lastComparison; } @@ -167824,14 +168800,14 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. @Override public String toString() { - StringBuilder sb = new StringBuilder("revoke_privileges_args("); + StringBuilder sb = new StringBuilder("grant_revoke_privileges_args("); boolean first = true; - sb.append("privileges:"); - if (this.privileges == null) { + sb.append("request:"); + if (this.request == null) { sb.append("null"); } else { - sb.append(this.privileges); + sb.append(this.request); } first = false; sb.append(")"); @@ -167841,8 +168817,8 @@ public String toString() { public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity - if (privileges != null) { - privileges.validate(); + if (request != null) { + request.validate(); } } @@ -167862,15 +168838,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class revoke_privileges_argsStandardSchemeFactory implements SchemeFactory { - public revoke_privileges_argsStandardScheme getScheme() { - return new revoke_privileges_argsStandardScheme(); + private static class grant_revoke_privileges_argsStandardSchemeFactory implements SchemeFactory { + public grant_revoke_privileges_argsStandardScheme getScheme() { + return new grant_revoke_privileges_argsStandardScheme(); } } - private static class revoke_privileges_argsStandardScheme extends StandardScheme { + private static class grant_revoke_privileges_argsStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -167880,11 +168856,11 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_a break; } switch (schemeField.id) { - case 1: // PRIVILEGES + case 1: // REQUEST if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { - struct.privileges = new PrivilegeBag(); - struct.privileges.read(iprot); - struct.setPrivilegesIsSet(true); + struct.request = new GrantRevokePrivilegeRequest(); + struct.request.read(iprot); + struct.setRequestIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -167898,13 +168874,13 
@@ public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_a struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); - if (struct.privileges != null) { - oprot.writeFieldBegin(PRIVILEGES_FIELD_DESC); - struct.privileges.write(oprot); + if (struct.request != null) { + oprot.writeFieldBegin(REQUEST_FIELD_DESC); + struct.request.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); @@ -167913,54 +168889,54 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_ } - private static class revoke_privileges_argsTupleSchemeFactory implements SchemeFactory { - public revoke_privileges_argsTupleScheme getScheme() { - return new revoke_privileges_argsTupleScheme(); + private static class grant_revoke_privileges_argsTupleSchemeFactory implements SchemeFactory { + public grant_revoke_privileges_argsTupleScheme getScheme() { + return new grant_revoke_privileges_argsTupleScheme(); } } - private static class revoke_privileges_argsTupleScheme extends TupleScheme { + private static class grant_revoke_privileges_argsTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); - if (struct.isSetPrivileges()) { + if (struct.isSetRequest()) { optionals.set(0); } oprot.writeBitSet(optionals, 1); - if (struct.isSetPrivileges()) { - struct.privileges.write(oprot); + if (struct.isSetRequest()) { + struct.request.write(oprot); } } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(1); if (incoming.get(0)) { - struct.privileges = new PrivilegeBag(); - struct.privileges.read(iprot); - struct.setPrivilegesIsSet(true); + struct.request = new GrantRevokePrivilegeRequest(); + struct.request.read(iprot); + struct.setRequestIsSet(true); } } } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class revoke_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("revoke_privileges_result"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_revoke_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_revoke_privileges_result"); - private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", 
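// Usage sketch, not part of the generated file: the grant_revoke_privileges_args struct above
// carries a single GrantRevokePrivilegeRequest instead of a bare PrivilegeBag, so grant and revoke
// share one entry point. A request might be built as below; GrantRevokeType and the optional
// revokeGrantOption flag are standard metastore API fields, and the meaning attributed to
// revokeGrantOption here is an assumption based on its name.
import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
import org.apache.hadoop.hive.metastore.api.GrantRevokeType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;

public class GrantRevokeRequestSketch {
  /** Wraps an already-populated PrivilegeBag into a grant-type request. */
  public static GrantRevokePrivilegeRequest grantRequest(PrivilegeBag bag) {
    GrantRevokePrivilegeRequest request = new GrantRevokePrivilegeRequest();
    request.setRequestType(GrantRevokeType.GRANT);
    request.setPrivileges(bag);
    return request;
  }

  /** Wraps the bag into a revoke-type request; revokeGrantOption presumably mirrors REVOKE GRANT OPTION FOR. */
  public static GrantRevokePrivilegeRequest revokeRequest(PrivilegeBag bag, boolean revokeGrantOption) {
    GrantRevokePrivilegeRequest request = new GrantRevokePrivilegeRequest();
    request.setRequestType(GrantRevokeType.REVOKE);
    request.setPrivileges(bag);
    request.setRevokeGrantOption(revokeGrantOption);
    return request;
  }
}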
org.apache.thrift.protocol.TType.BOOL, (short)0); + private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0); private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new revoke_privileges_resultStandardSchemeFactory()); - schemes.put(TupleScheme.class, new revoke_privileges_resultTupleSchemeFactory()); + schemes.put(StandardScheme.class, new grant_revoke_privileges_resultStandardSchemeFactory()); + schemes.put(TupleScheme.class, new grant_revoke_privileges_resultTupleSchemeFactory()); } - private boolean success; // required + private GrantRevokePrivilegeResponse success; // required private MetaException o1; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ @@ -168025,74 +169001,72 @@ public String getFieldName() { } // isset id assignments - private static final int __SUCCESS_ISSET_ID = 0; - private byte __isset_bitfield = 0; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, - new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GrantRevokePrivilegeResponse.class))); tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(revoke_privileges_result.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_revoke_privileges_result.class, metaDataMap); } - public revoke_privileges_result() { + public grant_revoke_privileges_result() { } - public revoke_privileges_result( - boolean success, + public grant_revoke_privileges_result( + GrantRevokePrivilegeResponse success, MetaException o1) { this(); this.success = success; - setSuccessIsSet(true); this.o1 = o1; } /** * Performs a deep copy on other. 
*/ - public revoke_privileges_result(revoke_privileges_result other) { - __isset_bitfield = other.__isset_bitfield; - this.success = other.success; + public grant_revoke_privileges_result(grant_revoke_privileges_result other) { + if (other.isSetSuccess()) { + this.success = new GrantRevokePrivilegeResponse(other.success); + } if (other.isSetO1()) { this.o1 = new MetaException(other.o1); } } - public revoke_privileges_result deepCopy() { - return new revoke_privileges_result(this); + public grant_revoke_privileges_result deepCopy() { + return new grant_revoke_privileges_result(this); } @Override public void clear() { - setSuccessIsSet(false); - this.success = false; + this.success = null; this.o1 = null; } - public boolean isSuccess() { + public GrantRevokePrivilegeResponse getSuccess() { return this.success; } - public void setSuccess(boolean success) { + public void setSuccess(GrantRevokePrivilegeResponse success) { this.success = success; - setSuccessIsSet(true); } public void unsetSuccess() { - __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID); + this.success = null; } /** Returns true if field success is set (has been assigned a value) and false otherwise */ public boolean isSetSuccess() { - return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID); + return this.success != null; } public void setSuccessIsSet(boolean value) { - __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value); + if (!value) { + this.success = null; + } } public MetaException getO1() { @@ -168124,7 +169098,7 @@ public void setFieldValue(_Fields field, Object value) { if (value == null) { unsetSuccess(); } else { - setSuccess((Boolean)value); + setSuccess((GrantRevokePrivilegeResponse)value); } break; @@ -168142,7 +169116,7 @@ public void setFieldValue(_Fields field, Object value) { public Object getFieldValue(_Fields field) { switch (field) { case SUCCESS: - return isSuccess(); + return getSuccess(); case O1: return getO1(); @@ -168170,21 +169144,21 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof revoke_privileges_result) - return this.equals((revoke_privileges_result)that); + if (that instanceof grant_revoke_privileges_result) + return this.equals((grant_revoke_privileges_result)that); return false; } - public boolean equals(revoke_privileges_result that) { + public boolean equals(grant_revoke_privileges_result that) { if (that == null) return false; - boolean this_present_success = true; - boolean that_present_success = true; + boolean this_present_success = true && this.isSetSuccess(); + boolean that_present_success = true && that.isSetSuccess(); if (this_present_success || that_present_success) { if (!(this_present_success && that_present_success)) return false; - if (this.success != that.success) + if (!this.success.equals(that.success)) return false; } @@ -168204,7 +169178,7 @@ public boolean equals(revoke_privileges_result that) { public int hashCode() { List list = new ArrayList(); - boolean present_success = true; + boolean present_success = true && (isSetSuccess()); list.add(present_success); if (present_success) list.add(success); @@ -168218,7 +169192,7 @@ public int hashCode() { } @Override - public int compareTo(revoke_privileges_result other) { + public int compareTo(grant_revoke_privileges_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } @@ -168262,11 +169236,15 @@ 
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. @Override public String toString() { - StringBuilder sb = new StringBuilder("revoke_privileges_result("); + StringBuilder sb = new StringBuilder("grant_revoke_privileges_result("); boolean first = true; sb.append("success:"); - sb.append(this.success); + if (this.success == null) { + sb.append("null"); + } else { + sb.append(this.success); + } first = false; if (!first) sb.append(", "); sb.append("o1:"); @@ -168283,6 +169261,9 @@ public String toString() { public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity + if (success != null) { + success.validate(); + } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { @@ -168295,23 +169276,21 @@ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOExcept private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { - // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. - __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } - private static class revoke_privileges_resultStandardSchemeFactory implements SchemeFactory { - public revoke_privileges_resultStandardScheme getScheme() { - return new revoke_privileges_resultStandardScheme(); + private static class grant_revoke_privileges_resultStandardSchemeFactory implements SchemeFactory { + public grant_revoke_privileges_resultStandardScheme getScheme() { + return new grant_revoke_privileges_resultStandardScheme(); } } - private static class revoke_privileges_resultStandardScheme extends StandardScheme { + private static class grant_revoke_privileges_resultStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -168322,8 +169301,9 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_r } switch (schemeField.id) { case 0: // SUCCESS - if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { - struct.success = iprot.readBool(); + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.success = new GrantRevokePrivilegeResponse(); + struct.success.read(iprot); struct.setSuccessIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); @@ -168347,13 +169327,13 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, revoke_privileges_r struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); - if (struct.isSetSuccess()) { + if (struct.success != null) { oprot.writeFieldBegin(SUCCESS_FIELD_DESC); - oprot.writeBool(struct.success); + struct.success.write(oprot); oprot.writeFieldEnd(); } if 
(struct.o1 != null) { @@ -168367,16 +169347,16 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, revoke_privileges_ } - private static class revoke_privileges_resultTupleSchemeFactory implements SchemeFactory { - public revoke_privileges_resultTupleScheme getScheme() { - return new revoke_privileges_resultTupleScheme(); + private static class grant_revoke_privileges_resultTupleSchemeFactory implements SchemeFactory { + public grant_revoke_privileges_resultTupleScheme getScheme() { + return new grant_revoke_privileges_resultTupleScheme(); } } - private static class revoke_privileges_resultTupleScheme extends TupleScheme { + private static class grant_revoke_privileges_resultTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetSuccess()) { @@ -168387,7 +169367,7 @@ public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_r } oprot.writeBitSet(optionals, 2); if (struct.isSetSuccess()) { - oprot.writeBool(struct.success); + struct.success.write(oprot); } if (struct.isSetO1()) { struct.o1.write(oprot); @@ -168395,11 +169375,12 @@ public void write(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_r } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { - struct.success = iprot.readBool(); + struct.success = new GrantRevokePrivilegeResponse(); + struct.success.read(iprot); struct.setSuccessIsSet(true); } if (incoming.get(1)) { @@ -168412,22 +169393,25 @@ public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_re } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_revoke_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_revoke_privileges_args"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class refresh_privileges_args implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("refresh_privileges_args"); - private static final org.apache.thrift.protocol.TField REQUEST_FIELD_DESC = new org.apache.thrift.protocol.TField("request", org.apache.thrift.protocol.TType.STRUCT, (short)1); + private static final org.apache.thrift.protocol.TField OBJ_TO_REFRESH_FIELD_DESC = new org.apache.thrift.protocol.TField("objToRefresh", org.apache.thrift.protocol.TType.STRUCT, (short)1); + private static final org.apache.thrift.protocol.TField GRANT_REQUEST_FIELD_DESC = new org.apache.thrift.protocol.TField("grantRequest", org.apache.thrift.protocol.TType.STRUCT, (short)2); private 
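// Usage sketch, not part of the generated file: the grant_revoke_privileges_result struct above
// wraps its success field in a GrantRevokePrivilegeResponse struct (unlike the plain boolean
// results of grant_privileges/revoke_privileges), so a caller should check the optional flag
// before reading it. The Iface handle and request are assumed to come from the earlier sketches.
import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeResponse;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.TException;

public class GrantRevokeCallSketch {
  public static boolean apply(ThriftHiveMetastore.Iface client, GrantRevokePrivilegeRequest request)
      throws TException {
    GrantRevokePrivilegeResponse response = client.grant_revoke_privileges(request);
    // success is optional in the response struct, so guard with isSetSuccess() before isSuccess().
    return response.isSetSuccess() && response.isSuccess();
  }
}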
static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new grant_revoke_privileges_argsStandardSchemeFactory()); - schemes.put(TupleScheme.class, new grant_revoke_privileges_argsTupleSchemeFactory()); + schemes.put(StandardScheme.class, new refresh_privileges_argsStandardSchemeFactory()); + schemes.put(TupleScheme.class, new refresh_privileges_argsTupleSchemeFactory()); } - private GrantRevokePrivilegeRequest request; // required + private HiveObjectRef objToRefresh; // required + private GrantRevokePrivilegeRequest grantRequest; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { - REQUEST((short)1, "request"); + OBJ_TO_REFRESH((short)1, "objToRefresh"), + GRANT_REQUEST((short)2, "grantRequest"); private static final Map byName = new HashMap(); @@ -168442,8 +169426,10 @@ public void read(org.apache.thrift.protocol.TProtocol prot, revoke_privileges_re */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { - case 1: // REQUEST - return REQUEST; + case 1: // OBJ_TO_REFRESH + return OBJ_TO_REFRESH; + case 2: // GRANT_REQUEST + return GRANT_REQUEST; default: return null; } @@ -168487,70 +169473,109 @@ public String getFieldName() { public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.REQUEST, new org.apache.thrift.meta_data.FieldMetaData("request", org.apache.thrift.TFieldRequirementType.DEFAULT, + tmpMap.put(_Fields.OBJ_TO_REFRESH, new org.apache.thrift.meta_data.FieldMetaData("objToRefresh", org.apache.thrift.TFieldRequirementType.DEFAULT, + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HiveObjectRef.class))); + tmpMap.put(_Fields.GRANT_REQUEST, new org.apache.thrift.meta_data.FieldMetaData("grantRequest", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GrantRevokePrivilegeRequest.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_revoke_privileges_args.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(refresh_privileges_args.class, metaDataMap); } - public grant_revoke_privileges_args() { + public refresh_privileges_args() { } - public grant_revoke_privileges_args( - GrantRevokePrivilegeRequest request) + public refresh_privileges_args( + HiveObjectRef objToRefresh, + GrantRevokePrivilegeRequest grantRequest) { this(); - this.request = request; + this.objToRefresh = objToRefresh; + this.grantRequest = grantRequest; } /** * Performs a deep copy on other. 
*/ - public grant_revoke_privileges_args(grant_revoke_privileges_args other) { - if (other.isSetRequest()) { - this.request = new GrantRevokePrivilegeRequest(other.request); + public refresh_privileges_args(refresh_privileges_args other) { + if (other.isSetObjToRefresh()) { + this.objToRefresh = new HiveObjectRef(other.objToRefresh); + } + if (other.isSetGrantRequest()) { + this.grantRequest = new GrantRevokePrivilegeRequest(other.grantRequest); } } - public grant_revoke_privileges_args deepCopy() { - return new grant_revoke_privileges_args(this); + public refresh_privileges_args deepCopy() { + return new refresh_privileges_args(this); } @Override public void clear() { - this.request = null; + this.objToRefresh = null; + this.grantRequest = null; } - public GrantRevokePrivilegeRequest getRequest() { - return this.request; + public HiveObjectRef getObjToRefresh() { + return this.objToRefresh; } - public void setRequest(GrantRevokePrivilegeRequest request) { - this.request = request; + public void setObjToRefresh(HiveObjectRef objToRefresh) { + this.objToRefresh = objToRefresh; } - public void unsetRequest() { - this.request = null; + public void unsetObjToRefresh() { + this.objToRefresh = null; } - /** Returns true if field request is set (has been assigned a value) and false otherwise */ - public boolean isSetRequest() { - return this.request != null; + /** Returns true if field objToRefresh is set (has been assigned a value) and false otherwise */ + public boolean isSetObjToRefresh() { + return this.objToRefresh != null; } - public void setRequestIsSet(boolean value) { + public void setObjToRefreshIsSet(boolean value) { if (!value) { - this.request = null; + this.objToRefresh = null; + } + } + + public GrantRevokePrivilegeRequest getGrantRequest() { + return this.grantRequest; + } + + public void setGrantRequest(GrantRevokePrivilegeRequest grantRequest) { + this.grantRequest = grantRequest; + } + + public void unsetGrantRequest() { + this.grantRequest = null; + } + + /** Returns true if field grantRequest is set (has been assigned a value) and false otherwise */ + public boolean isSetGrantRequest() { + return this.grantRequest != null; + } + + public void setGrantRequestIsSet(boolean value) { + if (!value) { + this.grantRequest = null; } } public void setFieldValue(_Fields field, Object value) { switch (field) { - case REQUEST: + case OBJ_TO_REFRESH: if (value == null) { - unsetRequest(); + unsetObjToRefresh(); } else { - setRequest((GrantRevokePrivilegeRequest)value); + setObjToRefresh((HiveObjectRef)value); + } + break; + + case GRANT_REQUEST: + if (value == null) { + unsetGrantRequest(); + } else { + setGrantRequest((GrantRevokePrivilegeRequest)value); } break; @@ -168559,8 +169584,11 @@ public void setFieldValue(_Fields field, Object value) { public Object getFieldValue(_Fields field) { switch (field) { - case REQUEST: - return getRequest(); + case OBJ_TO_REFRESH: + return getObjToRefresh(); + + case GRANT_REQUEST: + return getGrantRequest(); } throw new IllegalStateException(); @@ -168573,8 +169601,10 @@ public boolean isSet(_Fields field) { } switch (field) { - case REQUEST: - return isSetRequest(); + case OBJ_TO_REFRESH: + return isSetObjToRefresh(); + case GRANT_REQUEST: + return isSetGrantRequest(); } throw new IllegalStateException(); } @@ -168583,21 +169613,30 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof grant_revoke_privileges_args) - return this.equals((grant_revoke_privileges_args)that); + 
if (that instanceof refresh_privileges_args) + return this.equals((refresh_privileges_args)that); return false; } - public boolean equals(grant_revoke_privileges_args that) { + public boolean equals(refresh_privileges_args that) { if (that == null) return false; - boolean this_present_request = true && this.isSetRequest(); - boolean that_present_request = true && that.isSetRequest(); - if (this_present_request || that_present_request) { - if (!(this_present_request && that_present_request)) + boolean this_present_objToRefresh = true && this.isSetObjToRefresh(); + boolean that_present_objToRefresh = true && that.isSetObjToRefresh(); + if (this_present_objToRefresh || that_present_objToRefresh) { + if (!(this_present_objToRefresh && that_present_objToRefresh)) return false; - if (!this.request.equals(that.request)) + if (!this.objToRefresh.equals(that.objToRefresh)) + return false; + } + + boolean this_present_grantRequest = true && this.isSetGrantRequest(); + boolean that_present_grantRequest = true && that.isSetGrantRequest(); + if (this_present_grantRequest || that_present_grantRequest) { + if (!(this_present_grantRequest && that_present_grantRequest)) + return false; + if (!this.grantRequest.equals(that.grantRequest)) return false; } @@ -168608,28 +169647,43 @@ public boolean equals(grant_revoke_privileges_args that) { public int hashCode() { List list = new ArrayList(); - boolean present_request = true && (isSetRequest()); - list.add(present_request); - if (present_request) - list.add(request); + boolean present_objToRefresh = true && (isSetObjToRefresh()); + list.add(present_objToRefresh); + if (present_objToRefresh) + list.add(objToRefresh); + + boolean present_grantRequest = true && (isSetGrantRequest()); + list.add(present_grantRequest); + if (present_grantRequest) + list.add(grantRequest); return list.hashCode(); } @Override - public int compareTo(grant_revoke_privileges_args other) { + public int compareTo(refresh_privileges_args other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; - lastComparison = Boolean.valueOf(isSetRequest()).compareTo(other.isSetRequest()); + lastComparison = Boolean.valueOf(isSetObjToRefresh()).compareTo(other.isSetObjToRefresh()); if (lastComparison != 0) { return lastComparison; } - if (isSetRequest()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.request, other.request); + if (isSetObjToRefresh()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.objToRefresh, other.objToRefresh); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetGrantRequest()).compareTo(other.isSetGrantRequest()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetGrantRequest()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.grantRequest, other.grantRequest); if (lastComparison != 0) { return lastComparison; } @@ -168651,14 +169705,22 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. 
@Override public String toString() { - StringBuilder sb = new StringBuilder("grant_revoke_privileges_args("); + StringBuilder sb = new StringBuilder("refresh_privileges_args("); boolean first = true; - sb.append("request:"); - if (this.request == null) { + sb.append("objToRefresh:"); + if (this.objToRefresh == null) { sb.append("null"); } else { - sb.append(this.request); + sb.append(this.objToRefresh); + } + first = false; + if (!first) sb.append(", "); + sb.append("grantRequest:"); + if (this.grantRequest == null) { + sb.append("null"); + } else { + sb.append(this.grantRequest); } first = false; sb.append(")"); @@ -168668,8 +169730,11 @@ public String toString() { public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity - if (request != null) { - request.validate(); + if (objToRefresh != null) { + objToRefresh.validate(); + } + if (grantRequest != null) { + grantRequest.validate(); } } @@ -168689,15 +169754,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class grant_revoke_privileges_argsStandardSchemeFactory implements SchemeFactory { - public grant_revoke_privileges_argsStandardScheme getScheme() { - return new grant_revoke_privileges_argsStandardScheme(); + private static class refresh_privileges_argsStandardSchemeFactory implements SchemeFactory { + public refresh_privileges_argsStandardScheme getScheme() { + return new refresh_privileges_argsStandardScheme(); } } - private static class grant_revoke_privileges_argsStandardScheme extends StandardScheme { + private static class refresh_privileges_argsStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, refresh_privileges_args struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -168707,11 +169772,20 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privil break; } switch (schemeField.id) { - case 1: // REQUEST + case 1: // OBJ_TO_REFRESH if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { - struct.request = new GrantRevokePrivilegeRequest(); - struct.request.read(iprot); - struct.setRequestIsSet(true); + struct.objToRefresh = new HiveObjectRef(); + struct.objToRefresh.read(iprot); + struct.setObjToRefreshIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + case 2: // GRANT_REQUEST + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.grantRequest = new GrantRevokePrivilegeRequest(); + struct.grantRequest.read(iprot); + struct.setGrantRequestIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -168725,13 +169799,18 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privil struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, refresh_privileges_args struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); - if (struct.request != null) { - oprot.writeFieldBegin(REQUEST_FIELD_DESC); - struct.request.write(oprot); + if (struct.objToRefresh != null) 
{ + oprot.writeFieldBegin(OBJ_TO_REFRESH_FIELD_DESC); + struct.objToRefresh.write(oprot); + oprot.writeFieldEnd(); + } + if (struct.grantRequest != null) { + oprot.writeFieldBegin(GRANT_REQUEST_FIELD_DESC); + struct.grantRequest.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); @@ -168740,51 +169819,62 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privi } - private static class grant_revoke_privileges_argsTupleSchemeFactory implements SchemeFactory { - public grant_revoke_privileges_argsTupleScheme getScheme() { - return new grant_revoke_privileges_argsTupleScheme(); + private static class refresh_privileges_argsTupleSchemeFactory implements SchemeFactory { + public refresh_privileges_argsTupleScheme getScheme() { + return new refresh_privileges_argsTupleScheme(); } } - private static class grant_revoke_privileges_argsTupleScheme extends TupleScheme { + private static class refresh_privileges_argsTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, refresh_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); - if (struct.isSetRequest()) { + if (struct.isSetObjToRefresh()) { optionals.set(0); } - oprot.writeBitSet(optionals, 1); - if (struct.isSetRequest()) { - struct.request.write(oprot); + if (struct.isSetGrantRequest()) { + optionals.set(1); + } + oprot.writeBitSet(optionals, 2); + if (struct.isSetObjToRefresh()) { + struct.objToRefresh.write(oprot); + } + if (struct.isSetGrantRequest()) { + struct.grantRequest.write(oprot); } } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_args struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, refresh_privileges_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(1); + BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { - struct.request = new GrantRevokePrivilegeRequest(); - struct.request.read(iprot); - struct.setRequestIsSet(true); + struct.objToRefresh = new HiveObjectRef(); + struct.objToRefresh.read(iprot); + struct.setObjToRefreshIsSet(true); + } + if (incoming.get(1)) { + struct.grantRequest = new GrantRevokePrivilegeRequest(); + struct.grantRequest.read(iprot); + struct.setGrantRequestIsSet(true); } } } } - @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class grant_revoke_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("grant_revoke_privileges_result"); + @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class refresh_privileges_result implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("refresh_privileges_result"); private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", 
org.apache.thrift.protocol.TType.STRUCT, (short)0); private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { - schemes.put(StandardScheme.class, new grant_revoke_privileges_resultStandardSchemeFactory()); - schemes.put(TupleScheme.class, new grant_revoke_privileges_resultTupleSchemeFactory()); + schemes.put(StandardScheme.class, new refresh_privileges_resultStandardSchemeFactory()); + schemes.put(TupleScheme.class, new refresh_privileges_resultTupleSchemeFactory()); } private GrantRevokePrivilegeResponse success; // required @@ -168860,13 +169950,13 @@ public String getFieldName() { tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT))); metaDataMap = Collections.unmodifiableMap(tmpMap); - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(grant_revoke_privileges_result.class, metaDataMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(refresh_privileges_result.class, metaDataMap); } - public grant_revoke_privileges_result() { + public refresh_privileges_result() { } - public grant_revoke_privileges_result( + public refresh_privileges_result( GrantRevokePrivilegeResponse success, MetaException o1) { @@ -168878,7 +169968,7 @@ public grant_revoke_privileges_result( /** * Performs a deep copy on other. */ - public grant_revoke_privileges_result(grant_revoke_privileges_result other) { + public refresh_privileges_result(refresh_privileges_result other) { if (other.isSetSuccess()) { this.success = new GrantRevokePrivilegeResponse(other.success); } @@ -168887,8 +169977,8 @@ public grant_revoke_privileges_result(grant_revoke_privileges_result other) { } } - public grant_revoke_privileges_result deepCopy() { - return new grant_revoke_privileges_result(this); + public refresh_privileges_result deepCopy() { + return new refresh_privileges_result(this); } @Override @@ -168995,12 +170085,12 @@ public boolean isSet(_Fields field) { public boolean equals(Object that) { if (that == null) return false; - if (that instanceof grant_revoke_privileges_result) - return this.equals((grant_revoke_privileges_result)that); + if (that instanceof refresh_privileges_result) + return this.equals((refresh_privileges_result)that); return false; } - public boolean equals(grant_revoke_privileges_result that) { + public boolean equals(refresh_privileges_result that) { if (that == null) return false; @@ -169043,7 +170133,7 @@ public int hashCode() { } @Override - public int compareTo(grant_revoke_privileges_result other) { + public int compareTo(refresh_privileges_result other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } @@ -169087,7 +170177,7 @@ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache. 
@Override public String toString() { - StringBuilder sb = new StringBuilder("grant_revoke_privileges_result("); + StringBuilder sb = new StringBuilder("refresh_privileges_result("); boolean first = true; sb.append("success:"); @@ -169133,15 +170223,15 @@ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException } } - private static class grant_revoke_privileges_resultStandardSchemeFactory implements SchemeFactory { - public grant_revoke_privileges_resultStandardScheme getScheme() { - return new grant_revoke_privileges_resultStandardScheme(); + private static class refresh_privileges_resultStandardSchemeFactory implements SchemeFactory { + public refresh_privileges_resultStandardScheme getScheme() { + return new refresh_privileges_resultStandardScheme(); } } - private static class grant_revoke_privileges_resultStandardScheme extends StandardScheme { + private static class refresh_privileges_resultStandardScheme extends StandardScheme { - public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol iprot, refresh_privileges_result struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) @@ -169178,7 +170268,7 @@ public void read(org.apache.thrift.protocol.TProtocol iprot, grant_revoke_privil struct.validate(); } - public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol oprot, refresh_privileges_result struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); @@ -169198,16 +170288,16 @@ public void write(org.apache.thrift.protocol.TProtocol oprot, grant_revoke_privi } - private static class grant_revoke_privileges_resultTupleSchemeFactory implements SchemeFactory { - public grant_revoke_privileges_resultTupleScheme getScheme() { - return new grant_revoke_privileges_resultTupleScheme(); + private static class refresh_privileges_resultTupleSchemeFactory implements SchemeFactory { + public refresh_privileges_resultTupleScheme getScheme() { + return new refresh_privileges_resultTupleScheme(); } } - private static class grant_revoke_privileges_resultTupleScheme extends TupleScheme { + private static class refresh_privileges_resultTupleScheme extends TupleScheme { @Override - public void write(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { + public void write(org.apache.thrift.protocol.TProtocol prot, refresh_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; BitSet optionals = new BitSet(); if (struct.isSetSuccess()) { @@ -169226,7 +170316,7 @@ public void write(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privil } @Override - public void read(org.apache.thrift.protocol.TProtocol prot, grant_revoke_privileges_result struct) throws org.apache.thrift.TException { + public void read(org.apache.thrift.protocol.TProtocol prot, refresh_privileges_result struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; BitSet incoming = iprot.readBitSet(2); if (incoming.get(0)) { diff --git a/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php 
b/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php index 7a8a42a..67dd83c 100644 --- a/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php +++ b/standalone-metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php @@ -1061,6 +1061,13 @@ interface ThriftHiveMetastoreIf extends \FacebookServiceIf { */ public function grant_revoke_privileges(\metastore\GrantRevokePrivilegeRequest $request); /** + * @param \metastore\HiveObjectRef $objToRefresh + * @param \metastore\GrantRevokePrivilegeRequest $grantRequest + * @return \metastore\GrantRevokePrivilegeResponse + * @throws \metastore\MetaException + */ + public function refresh_privileges(\metastore\HiveObjectRef $objToRefresh, \metastore\GrantRevokePrivilegeRequest $grantRequest); + /** * @param string $user_name * @param string[] $group_names * @return string[] @@ -8905,6 +8912,61 @@ class ThriftHiveMetastoreClient extends \FacebookServiceClient implements \metas throw new \Exception("grant_revoke_privileges failed: unknown result"); } + public function refresh_privileges(\metastore\HiveObjectRef $objToRefresh, \metastore\GrantRevokePrivilegeRequest $grantRequest) + { + $this->send_refresh_privileges($objToRefresh, $grantRequest); + return $this->recv_refresh_privileges(); + } + + public function send_refresh_privileges(\metastore\HiveObjectRef $objToRefresh, \metastore\GrantRevokePrivilegeRequest $grantRequest) + { + $args = new \metastore\ThriftHiveMetastore_refresh_privileges_args(); + $args->objToRefresh = $objToRefresh; + $args->grantRequest = $grantRequest; + $bin_accel = ($this->output_ instanceof TBinaryProtocolAccelerated) && function_exists('thrift_protocol_write_binary'); + if ($bin_accel) + { + thrift_protocol_write_binary($this->output_, 'refresh_privileges', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite()); + } + else + { + $this->output_->writeMessageBegin('refresh_privileges', TMessageType::CALL, $this->seqid_); + $args->write($this->output_); + $this->output_->writeMessageEnd(); + $this->output_->getTransport()->flush(); + } + } + + public function recv_refresh_privileges() + { + $bin_accel = ($this->input_ instanceof TBinaryProtocolAccelerated) && function_exists('thrift_protocol_read_binary'); + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_refresh_privileges_result', $this->input_->isStrictRead()); + else + { + $rseqid = 0; + $fname = null; + $mtype = 0; + + $this->input_->readMessageBegin($fname, $mtype, $rseqid); + if ($mtype == TMessageType::EXCEPTION) { + $x = new TApplicationException(); + $x->read($this->input_); + $this->input_->readMessageEnd(); + throw $x; + } + $result = new \metastore\ThriftHiveMetastore_refresh_privileges_result(); + $result->read($this->input_); + $this->input_->readMessageEnd(); + } + if ($result->success !== null) { + return $result->success; + } + if ($result->o1 !== null) { + throw $result->o1; + } + throw new \Exception("refresh_privileges failed: unknown result"); + } + public function set_ugi($user_name, array $group_names) { $this->send_set_ugi($user_name, $group_names); @@ -44275,6 +44337,219 @@ class ThriftHiveMetastore_grant_revoke_privileges_result { } +class ThriftHiveMetastore_refresh_privileges_args { + static $_TSPEC; + + /** + * @var \metastore\HiveObjectRef + */ + public $objToRefresh = null; + /** + * @var \metastore\GrantRevokePrivilegeRequest + */ + public $grantRequest = null; + + public function __construct($vals=null) { + if 
(!isset(self::$_TSPEC)) { + self::$_TSPEC = array( + 1 => array( + 'var' => 'objToRefresh', + 'type' => TType::STRUCT, + 'class' => '\metastore\HiveObjectRef', + ), + 2 => array( + 'var' => 'grantRequest', + 'type' => TType::STRUCT, + 'class' => '\metastore\GrantRevokePrivilegeRequest', + ), + ); + } + if (is_array($vals)) { + if (isset($vals['objToRefresh'])) { + $this->objToRefresh = $vals['objToRefresh']; + } + if (isset($vals['grantRequest'])) { + $this->grantRequest = $vals['grantRequest']; + } + } + } + + public function getName() { + return 'ThriftHiveMetastore_refresh_privileges_args'; + } + + public function read($input) + { + $xfer = 0; + $fname = null; + $ftype = 0; + $fid = 0; + $xfer += $input->readStructBegin($fname); + while (true) + { + $xfer += $input->readFieldBegin($fname, $ftype, $fid); + if ($ftype == TType::STOP) { + break; + } + switch ($fid) + { + case 1: + if ($ftype == TType::STRUCT) { + $this->objToRefresh = new \metastore\HiveObjectRef(); + $xfer += $this->objToRefresh->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + case 2: + if ($ftype == TType::STRUCT) { + $this->grantRequest = new \metastore\GrantRevokePrivilegeRequest(); + $xfer += $this->grantRequest->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + default: + $xfer += $input->skip($ftype); + break; + } + $xfer += $input->readFieldEnd(); + } + $xfer += $input->readStructEnd(); + return $xfer; + } + + public function write($output) { + $xfer = 0; + $xfer += $output->writeStructBegin('ThriftHiveMetastore_refresh_privileges_args'); + if ($this->objToRefresh !== null) { + if (!is_object($this->objToRefresh)) { + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA); + } + $xfer += $output->writeFieldBegin('objToRefresh', TType::STRUCT, 1); + $xfer += $this->objToRefresh->write($output); + $xfer += $output->writeFieldEnd(); + } + if ($this->grantRequest !== null) { + if (!is_object($this->grantRequest)) { + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA); + } + $xfer += $output->writeFieldBegin('grantRequest', TType::STRUCT, 2); + $xfer += $this->grantRequest->write($output); + $xfer += $output->writeFieldEnd(); + } + $xfer += $output->writeFieldStop(); + $xfer += $output->writeStructEnd(); + return $xfer; + } + +} + +class ThriftHiveMetastore_refresh_privileges_result { + static $_TSPEC; + + /** + * @var \metastore\GrantRevokePrivilegeResponse + */ + public $success = null; + /** + * @var \metastore\MetaException + */ + public $o1 = null; + + public function __construct($vals=null) { + if (!isset(self::$_TSPEC)) { + self::$_TSPEC = array( + 0 => array( + 'var' => 'success', + 'type' => TType::STRUCT, + 'class' => '\metastore\GrantRevokePrivilegeResponse', + ), + 1 => array( + 'var' => 'o1', + 'type' => TType::STRUCT, + 'class' => '\metastore\MetaException', + ), + ); + } + if (is_array($vals)) { + if (isset($vals['success'])) { + $this->success = $vals['success']; + } + if (isset($vals['o1'])) { + $this->o1 = $vals['o1']; + } + } + } + + public function getName() { + return 'ThriftHiveMetastore_refresh_privileges_result'; + } + + public function read($input) + { + $xfer = 0; + $fname = null; + $ftype = 0; + $fid = 0; + $xfer += $input->readStructBegin($fname); + while (true) + { + $xfer += $input->readFieldBegin($fname, $ftype, $fid); + if ($ftype == TType::STOP) { + break; + } + switch ($fid) + { + case 0: + if ($ftype == TType::STRUCT) { + $this->success = new 
\metastore\GrantRevokePrivilegeResponse(); + $xfer += $this->success->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + case 1: + if ($ftype == TType::STRUCT) { + $this->o1 = new \metastore\MetaException(); + $xfer += $this->o1->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + default: + $xfer += $input->skip($ftype); + break; + } + $xfer += $input->readFieldEnd(); + } + $xfer += $input->readStructEnd(); + return $xfer; + } + + public function write($output) { + $xfer = 0; + $xfer += $output->writeStructBegin('ThriftHiveMetastore_refresh_privileges_result'); + if ($this->success !== null) { + if (!is_object($this->success)) { + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA); + } + $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0); + $xfer += $this->success->write($output); + $xfer += $output->writeFieldEnd(); + } + if ($this->o1 !== null) { + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1); + $xfer += $this->o1->write($output); + $xfer += $output->writeFieldEnd(); + } + $xfer += $output->writeFieldStop(); + $xfer += $output->writeStructEnd(); + return $xfer; + } + +} + class ThriftHiveMetastore_set_ugi_args { static $_TSPEC; diff --git a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote index 079c7fc..61fd2a8 100755 --- a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote +++ b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote @@ -151,6 +151,7 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help': print(' bool grant_privileges(PrivilegeBag privileges)') print(' bool revoke_privileges(PrivilegeBag privileges)') print(' GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request)') + print(' GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, GrantRevokePrivilegeRequest grantRequest)') print(' set_ugi(string user_name, group_names)') print(' string get_delegation_token(string token_owner, string renewer_kerberos_principal_name)') print(' i64 renew_delegation_token(string token_str_form)') @@ -1057,6 +1058,12 @@ elif cmd == 'grant_revoke_privileges': sys.exit(1) pp.pprint(client.grant_revoke_privileges(eval(args[0]),)) +elif cmd == 'refresh_privileges': + if len(args) != 2: + print('refresh_privileges requires 2 args') + sys.exit(1) + pp.pprint(client.refresh_privileges(eval(args[0]),eval(args[1]),)) + elif cmd == 'set_ugi': if len(args) != 2: print('set_ugi requires 2 args') diff --git a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py index b0e64d8..e2b2298 100644 --- a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py +++ b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py @@ -1061,6 +1061,14 @@ def grant_revoke_privileges(self, request): """ pass + def refresh_privileges(self, objToRefresh, grantRequest): + """ + Parameters: + - objToRefresh + - grantRequest + """ + pass + def set_ugi(self, user_name, group_names): """ Parameters: @@ -6309,6 +6317,41 @@ def recv_grant_revoke_privileges(self): raise result.o1 raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_privileges failed: unknown result") + def refresh_privileges(self, objToRefresh, 
grantRequest): + """ + Parameters: + - objToRefresh + - grantRequest + """ + self.send_refresh_privileges(objToRefresh, grantRequest) + return self.recv_refresh_privileges() + + def send_refresh_privileges(self, objToRefresh, grantRequest): + self._oprot.writeMessageBegin('refresh_privileges', TMessageType.CALL, self._seqid) + args = refresh_privileges_args() + args.objToRefresh = objToRefresh + args.grantRequest = grantRequest + args.write(self._oprot) + self._oprot.writeMessageEnd() + self._oprot.trans.flush() + + def recv_refresh_privileges(self): + iprot = self._iprot + (fname, mtype, rseqid) = iprot.readMessageBegin() + if mtype == TMessageType.EXCEPTION: + x = TApplicationException() + x.read(iprot) + iprot.readMessageEnd() + raise x + result = refresh_privileges_result() + result.read(iprot) + iprot.readMessageEnd() + if result.success is not None: + return result.success + if result.o1 is not None: + raise result.o1 + raise TApplicationException(TApplicationException.MISSING_RESULT, "refresh_privileges failed: unknown result") + def set_ugi(self, user_name, group_names): """ Parameters: @@ -8930,6 +8973,7 @@ def __init__(self, handler): self._processMap["grant_privileges"] = Processor.process_grant_privileges self._processMap["revoke_privileges"] = Processor.process_revoke_privileges self._processMap["grant_revoke_privileges"] = Processor.process_grant_revoke_privileges + self._processMap["refresh_privileges"] = Processor.process_refresh_privileges self._processMap["set_ugi"] = Processor.process_set_ugi self._processMap["get_delegation_token"] = Processor.process_get_delegation_token self._processMap["renew_delegation_token"] = Processor.process_renew_delegation_token @@ -12262,6 +12306,28 @@ def process_grant_revoke_privileges(self, seqid, iprot, oprot): oprot.writeMessageEnd() oprot.trans.flush() + def process_refresh_privileges(self, seqid, iprot, oprot): + args = refresh_privileges_args() + args.read(iprot) + iprot.readMessageEnd() + result = refresh_privileges_result() + try: + result.success = self._handler.refresh_privileges(args.objToRefresh, args.grantRequest) + msg_type = TMessageType.REPLY + except (TTransport.TTransportException, KeyboardInterrupt, SystemExit): + raise + except MetaException as o1: + msg_type = TMessageType.REPLY + result.o1 = o1 + except Exception as ex: + msg_type = TMessageType.EXCEPTION + logging.exception(ex) + result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error') + oprot.writeMessageBegin("refresh_privileges", msg_type, seqid) + result.write(oprot) + oprot.writeMessageEnd() + oprot.trans.flush() + def process_set_ugi(self, seqid, iprot, oprot): args = set_ugi_args() args.read(iprot) @@ -36628,6 +36694,165 @@ def __eq__(self, other): def __ne__(self, other): return not (self == other) +class refresh_privileges_args: + """ + Attributes: + - objToRefresh + - grantRequest + """ + + thrift_spec = ( + None, # 0 + (1, TType.STRUCT, 'objToRefresh', (HiveObjectRef, HiveObjectRef.thrift_spec), None, ), # 1 + (2, TType.STRUCT, 'grantRequest', (GrantRevokePrivilegeRequest, GrantRevokePrivilegeRequest.thrift_spec), None, ), # 2 + ) + + def __init__(self, objToRefresh=None, grantRequest=None,): + self.objToRefresh = objToRefresh + self.grantRequest = grantRequest + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, 
(self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRUCT: + self.objToRefresh = HiveObjectRef() + self.objToRefresh.read(iprot) + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.STRUCT: + self.grantRequest = GrantRevokePrivilegeRequest() + self.grantRequest.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('refresh_privileges_args') + if self.objToRefresh is not None: + oprot.writeFieldBegin('objToRefresh', TType.STRUCT, 1) + self.objToRefresh.write(oprot) + oprot.writeFieldEnd() + if self.grantRequest is not None: + oprot.writeFieldBegin('grantRequest', TType.STRUCT, 2) + self.grantRequest.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + return + + + def __hash__(self): + value = 17 + value = (value * 31) ^ hash(self.objToRefresh) + value = (value * 31) ^ hash(self.grantRequest) + return value + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + +class refresh_privileges_result: + """ + Attributes: + - success + - o1 + """ + + thrift_spec = ( + (0, TType.STRUCT, 'success', (GrantRevokePrivilegeResponse, GrantRevokePrivilegeResponse.thrift_spec), None, ), # 0 + (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1 + ) + + def __init__(self, success=None, o1=None,): + self.success = success + self.o1 = o1 + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 0: + if ftype == TType.STRUCT: + self.success = GrantRevokePrivilegeResponse() + self.success.read(iprot) + else: + iprot.skip(ftype) + elif fid == 1: + if ftype == TType.STRUCT: + self.o1 = MetaException() + self.o1.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('refresh_privileges_result') + if self.success is not None: + oprot.writeFieldBegin('success', TType.STRUCT, 0) + self.success.write(oprot) + oprot.writeFieldEnd() + if self.o1 is not None: + oprot.writeFieldBegin('o1', TType.STRUCT, 1) + self.o1.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + + def validate(self): + return + + + def __hash__(self): + value = 17 
+ value = (value * 31) ^ hash(self.success) + value = (value * 31) ^ hash(self.o1) + return value + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + class set_ugi_args: """ Attributes: diff --git a/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb index 58ebd29..43b75e1 100644 --- a/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb +++ b/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb @@ -2161,6 +2161,22 @@ module ThriftHiveMetastore raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'grant_revoke_privileges failed: unknown result') end + def refresh_privileges(objToRefresh, grantRequest) + send_refresh_privileges(objToRefresh, grantRequest) + return recv_refresh_privileges() + end + + def send_refresh_privileges(objToRefresh, grantRequest) + send_message('refresh_privileges', Refresh_privileges_args, :objToRefresh => objToRefresh, :grantRequest => grantRequest) + end + + def recv_refresh_privileges() + result = receive_message(Refresh_privileges_result) + return result.success unless result.success.nil? + raise result.o1 unless result.o1.nil? + raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'refresh_privileges failed: unknown result') + end + def set_ugi(user_name, group_names) send_set_ugi(user_name, group_names) return recv_set_ugi() @@ -5076,6 +5092,17 @@ module ThriftHiveMetastore write_result(result, oprot, 'grant_revoke_privileges', seqid) end + def process_refresh_privileges(seqid, iprot, oprot) + args = read_args(iprot, Refresh_privileges_args) + result = Refresh_privileges_result.new() + begin + result.success = @handler.refresh_privileges(args.objToRefresh, args.grantRequest) + rescue ::MetaException => o1 + result.o1 = o1 + end + write_result(result, oprot, 'refresh_privileges', seqid) + end + def process_set_ugi(seqid, iprot, oprot) args = read_args(iprot, Set_ugi_args) result = Set_ugi_result.new() @@ -10822,6 +10849,42 @@ module ThriftHiveMetastore ::Thrift::Struct.generate_accessors self end + class Refresh_privileges_args + include ::Thrift::Struct, ::Thrift::Struct_Union + OBJTOREFRESH = 1 + GRANTREQUEST = 2 + + FIELDS = { + OBJTOREFRESH => {:type => ::Thrift::Types::STRUCT, :name => 'objToRefresh', :class => ::HiveObjectRef}, + GRANTREQUEST => {:type => ::Thrift::Types::STRUCT, :name => 'grantRequest', :class => ::GrantRevokePrivilegeRequest} + } + + def struct_fields; FIELDS; end + + def validate + end + + ::Thrift::Struct.generate_accessors self + end + + class Refresh_privileges_result + include ::Thrift::Struct, ::Thrift::Struct_Union + SUCCESS = 0 + O1 = 1 + + FIELDS = { + SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::GrantRevokePrivilegeResponse}, + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException} + } + + def struct_fields; FIELDS; end + + def validate + end + + ::Thrift::Struct.generate_accessors self + end + class Set_ugi_args include ::Thrift::Struct, ::Thrift::Struct_Union USER_NAME = 1 diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 565549a..67c1f20 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -6243,6 +6243,24 @@ public GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilege } @Override + public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef objToRefresh, + GrantRevokePrivilegeRequest grantRequest) + throws TException { + incrementCounter("refresh_privileges"); + firePreEvent(new PreAuthorizationCallEvent(this)); + GrantRevokePrivilegeResponse response = new GrantRevokePrivilegeResponse(); + try { + boolean result = getMS().refreshPrivileges(objToRefresh, grantRequest.getPrivileges()); + response.setSuccess(result); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return response; + } + + @Override public boolean revoke_privileges(final PrivilegeBag privileges) throws TException { return revoke_privileges(privileges, false); } diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 9a43b2c..2737638 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -2235,6 +2235,29 @@ public boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption) t } @Override + public boolean refresh_privileges(HiveObjectRef objToRefresh, + PrivilegeBag grantPrivileges) throws MetaException, + TException { + String defaultCat = getDefaultCatalog(conf); + objToRefresh.setCatName(defaultCat); + + if (grantPrivileges.getPrivileges() != null) { + for (HiveObjectPrivilege priv : grantPrivileges.getPrivileges()) { + if (!priv.getHiveObject().isSetCatName()) priv.getHiveObject().setCatName(defaultCat); + } + } + GrantRevokePrivilegeRequest grantReq = new GrantRevokePrivilegeRequest(); + grantReq.setRequestType(GrantRevokeType.GRANT); + grantReq.setPrivileges(grantPrivileges); + + GrantRevokePrivilegeResponse res = client.refresh_privileges(objToRefresh, grantReq); + if (!res.isSetSuccess()) { + throw new MetaException("GrantRevokePrivilegeResponse missing success field"); + } + return res.isSuccess(); + } + + @Override public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String userName, List groupNames) throws MetaException, TException { diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java index 98674cf..fe1d1bf 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java @@ -2567,6 +2567,16 @@ boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption) throws MetaException, TException; /** + * @param revokePrivileges + * @param objToRefresh + * @return true on success + * @throws MetaException + * @throws TException + */ + public boolean refresh_privileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws MetaException, TException; + + /** * This is expected to be a no-op when in local 
mode, * which means that the implementation will return null. * @param owner the intended owner for the token diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index c5da7b5..c7dbd9f 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -49,6 +49,7 @@ import java.util.Map.Entry; import java.util.Properties; import java.util.Set; +import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -227,6 +228,7 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; @@ -6054,6 +6056,88 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) return committed; } + class PrivilegeWithoutCreateTimeComparator implements Comparator { + @Override + public int compare(HiveObjectPrivilege o1, HiveObjectPrivilege o2) { + int createTime1 = o1.getGrantInfo().getCreateTime(); + int createTime2 = o2.getGrantInfo().getCreateTime(); + o1.getGrantInfo().setCreateTime(0); + o2.getGrantInfo().setCreateTime(0); + int result = o1.compareTo(o2); + o1.getGrantInfo().setCreateTime(createTime1); + o2.getGrantInfo().setCreateTime(createTime2); + return result; + } + } + + @Override + public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + boolean committed = false; + try { + openTransaction(); + Set revokePrivilegeSet + = new TreeSet(new PrivilegeWithoutCreateTimeComparator()); + Set grantPrivilegeSet + = new TreeSet(new PrivilegeWithoutCreateTimeComparator()); + + List grants = null; + String catName = objToRefresh.isSetCatName() ? 
objToRefresh.getCatName() : + getDefaultCatalog(conf); + switch (objToRefresh.getObjectType()) { + case DATABASE: + grants = this.listDBGrantsAll(catName, objToRefresh.getDbName()); + break; + case TABLE: + grants = listTableGrantsAll(catName, objToRefresh.getDbName(), objToRefresh.getObjectName()); + break; + case COLUMN: + Preconditions.checkArgument(objToRefresh.getColumnName()==null, "columnName must be null"); + grants = convertTableCols(listTableAllColumnGrants(catName, + objToRefresh.getDbName(), objToRefresh.getObjectName())); + break; + default: + throw new MetaException("Unexpected object type " + objToRefresh.getObjectType()); + } + if (grants != null) { + for (HiveObjectPrivilege grant : grants) { + revokePrivilegeSet.add(grant); + } + } + + // Optimize revoke/grant list, remove the overlapping + if (grantPrivileges.getPrivileges() != null) { + for (HiveObjectPrivilege grantPrivilege : grantPrivileges.getPrivileges()) { + if (revokePrivilegeSet.contains(grantPrivilege)) { + revokePrivilegeSet.remove(grantPrivilege); + } else { + grantPrivilegeSet.add(grantPrivilege); + } + } + } + if (!revokePrivilegeSet.isEmpty()) { + PrivilegeBag remainingRevokePrivileges = new PrivilegeBag(); + for (HiveObjectPrivilege revokePrivilege : revokePrivilegeSet) { + remainingRevokePrivileges.addToPrivileges(revokePrivilege); + } + revokePrivileges(remainingRevokePrivileges, false); + } + if (!grantPrivilegeSet.isEmpty()) { + PrivilegeBag remainingGrantPrivileges = new PrivilegeBag(); + for (HiveObjectPrivilege grantPrivilege : grantPrivilegeSet) { + remainingGrantPrivileges.addToPrivileges(grantPrivilege); + } + grantPrivileges(remainingGrantPrivileges); + } + committed = commitTransaction(); + } finally { + if (!committed) { + rollbackTransaction(); + } + } + return committed; + } + @SuppressWarnings("unchecked") public List listMRoleMembers(String roleName) { boolean success = false; diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java index f6c46ee..afd90f0 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.InvalidInputException; @@ -754,7 +755,10 @@ PrincipalPrivilegeSet getColumnPrivilegeSet (String catName, String dbName, Stri boolean grantPrivileges (PrivilegeBag privileges) throws InvalidObjectException, MetaException, NoSuchObjectException; - boolean revokePrivileges (PrivilegeBag privileges, boolean grantOption) + boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) + throws InvalidObjectException, MetaException, NoSuchObjectException; + + boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) throws InvalidObjectException, MetaException, NoSuchObjectException; org.apache.hadoop.hive.metastore.api.Role getRole( diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java index 1ce86bb..10e26f4a 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.InvalidInputException; @@ -1422,6 +1423,12 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } @Override + public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + return rawStore.refreshPrivileges(objToRefresh, grantPrivileges); + } + + @Override public Role getRole(String roleName) throws NoSuchObjectException { return rawStore.getRole(roleName); } diff --git a/standalone-metastore/src/main/thrift/hive_metastore.thrift b/standalone-metastore/src/main/thrift/hive_metastore.thrift index 612afe1..760e5cc 100644 --- a/standalone-metastore/src/main/thrift/hive_metastore.thrift +++ b/standalone-metastore/src/main/thrift/hive_metastore.thrift @@ -1995,6 +1995,8 @@ service ThriftHiveMetastore extends fb303.FacebookService // Deprecated, use grant_revoke_privileges() bool revoke_privileges(1:PrivilegeBag privileges) throws(1:MetaException o1) GrantRevokePrivilegeResponse grant_revoke_privileges(1:GrantRevokePrivilegeRequest request) throws(1:MetaException o1); + // Revokes all privileges for the object and adds the newly granted privileges for it. + GrantRevokePrivilegeResponse refresh_privileges(1:HiveObjectRef objToRefresh, 2:GrantRevokePrivilegeRequest grantRequest) throws(1:MetaException o1); // this is used by metastore client to send UGI information to metastore server immediately // after setting up a connection. 
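[Editor's note: a minimal, hypothetical sketch of how a caller might drive the new refresh_privileges API through IMetaStoreClient, based only on the signatures and thrift types added in this patch. The class name, method name, database/table, principal, and privilege values below are made-up placeholders for illustration; they are not part of the change.

import java.util.Collections;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class RefreshPrivilegesSketch {

  // Replaces whatever grants the metastore currently holds for the
  // (hypothetical) table default.web_logs with a single SELECT grant
  // for user "alice".
  static boolean syncTableGrants(IMetaStoreClient client) throws Exception {
    // Object whose privileges are being refreshed; the catalog name is
    // filled in by HiveMetaStoreClient.refresh_privileges, so it is left unset here.
    HiveObjectRef tableRef = new HiveObjectRef();
    tableRef.setObjectType(HiveObjectType.TABLE);
    tableRef.setDbName("default");
    tableRef.setObjectName("web_logs");
    tableRef.setPartValues(Collections.<String>emptyList());

    // The desired grant, built with the thrift bean setters.
    PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo();
    grantInfo.setPrivilege("SELECT");
    grantInfo.setGrantor("admin");
    grantInfo.setGrantorType(PrincipalType.USER);
    grantInfo.setGrantOption(false);

    HiveObjectPrivilege priv = new HiveObjectPrivilege();
    priv.setHiveObject(tableRef);
    priv.setPrincipalName("alice");
    priv.setPrincipalType(PrincipalType.USER);
    priv.setGrantInfo(grantInfo);

    PrivilegeBag desired = new PrivilegeBag();
    desired.addToPrivileges(priv);

    // Grants on the object that are not in the bag get revoked, missing
    // ones get granted, and identical ones are left untouched.
    return client.refresh_privileges(tableRef, desired);
  }
}

Server side, ObjectStore.refreshPrivileges diffs this bag against the grants already stored for the object using a comparator that ignores createTime, then revokes only the leftover existing grants and issues only the newly added ones. End of editor's note.]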
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java index 304f567..87bf834 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -507,6 +508,11 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } @Override + public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + return objectStore.refreshPrivileges(objToRefresh, grantPrivileges); + } + @Override public Role getRole(String roleName) throws NoSuchObjectException { return objectStore.getRole(roleName); } diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java index 85c6727..4babf43 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -519,6 +520,12 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } @Override + public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + return false; + } + + @Override public Role getRole(String roleName) throws NoSuchObjectException { return null; diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java index ecddc7b..41f5eb9 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.metastore; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; import java.io.IOException; import java.lang.reflect.Constructor; @@ -2032,6 +2033,29 @@ public boolean revoke_privileges(PrivilegeBag privileges, boolean 
grantOption) t } @Override + public boolean refresh_privileges(HiveObjectRef objToRefresh, + PrivilegeBag grantPrivileges) throws MetaException, + TException { + String defaultCat = getDefaultCatalog(conf); + objToRefresh.setCatName(defaultCat); + + if (grantPrivileges.getPrivileges() != null) { + for (HiveObjectPrivilege priv : grantPrivileges.getPrivileges()) { + if (!priv.getHiveObject().isSetCatName()) priv.getHiveObject().setCatName(defaultCat); + } + } + GrantRevokePrivilegeRequest grantReq = new GrantRevokePrivilegeRequest(); + grantReq.setRequestType(GrantRevokeType.GRANT); + grantReq.setPrivileges(grantPrivileges); + + GrantRevokePrivilegeResponse res = client.refresh_privileges(objToRefresh, grantReq); + if (!res.isSetSuccess()) { + throw new MetaException("GrantRevokePrivilegeResponse missing success field"); + } + return res.isSuccess(); + } + + @Override public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject, String userName, List groupNames) throws MetaException, TException {