diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index a00d9075a4..6969960f47 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3779,6 +3779,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
     HIVE_SERVER2_ENABLE_DOAS("hive.server2.enable.doAs", true,
         "Setting this property to true will have HiveServer2 execute\n" +
         "Hive operations as the user making the calls to it."),
+    HIVE_SERVER2_SERVICE_USERS("hive.server2.service.users", "",
+        "Comma separated list of users to have HiveServer2 skip authorization when compiling queries."),
     HIVE_DISTCP_DOAS_USER("hive.distcp.privileged.doAs","hive",
         "This property allows privileged distcp executions done by hive\n" +
         "to run as this user."),
@@ -4884,6 +4886,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
         "hive.server2.authentication.ldap.userMembershipKey," +
         "hive.server2.authentication.ldap.groupClassKey," +
         "hive.server2.authentication.ldap.customLDAPQuery," +
+        "hive.server2.service.users," +
         "hive.privilege.synchronizer," +
         "hive.privilege.synchronizer.interval," +
         "hive.spark.client.connect.timeout," +
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
index d5641d983e..299fbf8bd9 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
@@ -88,6 +88,7 @@ public static void startServices() throws Exception {
     addToExpectedRestrictedMap("hive.server2.authentication.ldap.userMembershipKey");
     addToExpectedRestrictedMap("hive.server2.authentication.ldap.groupClassKey");
     addToExpectedRestrictedMap("hive.server2.authentication.ldap.customLDAPQuery");
+    addToExpectedRestrictedMap("hive.server2.service.users");
     addToExpectedRestrictedMap("hive.spark.client.channel.log.level");
     addToExpectedRestrictedMap("hive.spark.client.secret.bits");
     addToExpectedRestrictedMap("hive.spark.client.rpc.server.address");
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 19eb1dffc8..93bb2a3d14 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -22,6 +22,7 @@
 import java.io.UnsupportedEncodingException;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -37,6 +38,7 @@
 import org.antlr.runtime.TokenRewriteStream;
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -185,6 +187,20 @@ void setAutoCommitValue(Boolean autoCommit) {
   }
 
   public boolean skipAuthorization() {
+    // Users listed (comma-separated) in hive.server2.service.users bypass
+    // authorization when compiling queries; intended for trusted accounts.
+    SessionState ss = SessionState.get();
+    String serviceUsers = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_SERVICE_USERS);
+    if (ss != null && StringUtils.isNotBlank(serviceUsers)) {
+      String[] users = StringUtils.split(serviceUsers, ",");
+      Set<String> superUsers = new HashSet<>(Arrays.asList(users));
+      // NOTE(review): assumes ss.getAuthenticator() is non-null for an
+      // active session -- confirm before relying on this in other paths.
+      String authUser = ss.getAuthenticator().getUserName();
+      if (superUsers.contains(authUser)) {
+        console.logInfo("Skip authorization as the current user: " + authUser
+            + " is configured in " + HiveConf.ConfVars.HIVE_SERVER2_SERVICE_USERS.varname);
+        return true;
+      }
+    }
     return false;
   }