diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
index 7302e0993a..cebfdff970 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
@@ -286,7 +286,7 @@ public void test() throws Exception {
     List<String> args = new ArrayList<String>(baseArgs);
     args.add("-f");
-    args.add("../../metastore/scripts/upgrade/hive/hive-schema-3.1.0.hive.sql");
+    args.add("../../metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql");
     BeeLine beeLine = new BeeLine();
     int result = beeLine.begin(args.toArray(new String[] {}), null);
     beeLine.close();
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index c55f8db61a..2e72cb15f0 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -1698,6 +1698,7 @@ minillaplocal.query.files=\
   smblimit.q,\
   specialChar.q,\
   split.q,\
+  split_map_privs.q,\
   stats14.q,\
   stats15.q,\
   stats16.q,\
diff --git a/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
index d857410f6e..f0a0851949 100644
--- a/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
@@ -1672,6 +1672,7 @@ FROM
   JOIN `sys`.`TBLS` T ON (S.`SD_ID` = T.`SD_ID`)
   JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
   LEFT JOIN `sys`.`TBL_COL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
+  LEFT JOIN (SELECT * FROM `sys`.`TBL_COL_PRIVS` lateral view explode(split_map_privs(`TBL_COL_PRIVS`)) `TBL_COL_PRIVS` AS `TBL_COL_PRIVS`) P
 WHERE
   NOT restrict_information_schema() OR
   P.`TBL_ID` IS NOT NULL AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
@@ -1700,7 +1701,8 @@ SELECT DISTINCT
   P.`TBL_COL_PRIV`,
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
 FROM
-  `sys`.`TBL_COL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
+  (SELECT * FROM `sys`.`TBL_COL_PRIVS` lateral view explode(split_map_privs(`TBL_COL_PRIV`)) `TBL_COL_PRIV`) AS P
+  JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
   JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
   JOIN `sys`.`SDS` S ON (S.`SD_ID` = T.`SD_ID`)
   LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 76e460ed7a..1a6fc4c1cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -285,6 +285,7 @@
     system.registerGenericUDF("quote", GenericUDFQuote.class);
     system.registerGenericUDF("nvl", GenericUDFCoalesce.class); //HIVE-20961
     system.registerGenericUDF("split", GenericUDFSplit.class);
+    system.registerGenericUDF("split_map_privs", GenericUDFStringToPrivilege.class);
     system.registerGenericUDF("str_to_map", GenericUDFStringToMap.class);
     system.registerGenericUDF("translate", GenericUDFTranslate.class);
     system.registerGenericUDF("validate_acid_sort_order", GenericUDFValidateAcidSortOrder.class);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
index c7a48430fa..41ccbe2ea7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization;

+import java.util.Arrays;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;

+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.curator.framework.recipes.leader.LeaderLatch;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -43,6 +46,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import javax.xml.soap.Text;
+
 /**
  * PrivilegeSynchronizer defines a thread to synchronize privileges from
  * external authorizer to Hive metastore.
@@ -78,6 +83,9 @@ private void addACLsToBag(
     for (Map.Entry<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> principalAcls : principalAclsMap.entrySet()) {
       String principal = principalAcls.getKey();

+      int[] columnPrivilegeBits = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 };
+      int columnUpdateFlag = 0;
+
       for (Map.Entry<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult> acl : principalAcls.getValue()
           .entrySet()) {
         if (acl.getValue() == HiveResourceACLs.AccessResult.ALLOWED) {
@@ -95,16 +103,28 @@
               (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
           break;
         case COLUMN:
-          privBag.addToPrivileges(
-              new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
-                  principal, principalType, new PrivilegeGrantInfo(acl.getKey().toString(),
-                  (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
+
+          int privilegeBit = acl.getKey().ordinal();
+          columnPrivilegeBits[privilegeBit] = columnPrivilegeBits[privilegeBit] == 0 ? 1 : 0;
+          columnUpdateFlag = 1;
+
           break;
         default:
           throw new RuntimeException("Get unknown object type " + objectType);
         }
        }
      }
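+      // Once every ACL entry for this principal has been processed, the collected column
+      // privileges are persisted below as a single HiveObjectPrivilege whose grant info is a
+      // space-separated bit string (one bit per privilege); the split_map_privs() UDF later
+      // expands that string back into privilege names for the information schema views.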
+      if (columnUpdateFlag == 1) {
+        String columnPrivilegeBitsString =
+            StringUtils.join(Arrays.asList(ArrayUtils.toObject(columnPrivilegeBits)), " ");
+        privBag.addToPrivileges(
+            new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
+                principal, principalType,
+                new PrivilegeGrantInfo(columnPrivilegeBitsString, (int) (System.currentTimeMillis() / 1000), GRANTOR,
+                    PrincipalType.USER, false), authorizer));
+
+        columnUpdateFlag = 0;
+      }
     }
   }

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java
new file mode 100644
index 0000000000..e4a13e2a7f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * UDFSplitMapPrivs.
+ *
+ */
+@Description(name = "split_map_privs", value = "_FUNC_(str) - Splits a privilege bit string and maps it to the "
+    + "corresponding privilege type names", extended = "Example:\n"
+    + " > SELECT _FUNC_('0 1 1 0 1 1 0 0 0') FROM src LIMIT 1;\n"
+    + " [\"UPDATE\", \"CREATE\", \"ALTER\", \"INDEX\"]")
+class PrivilegeMap {
+  private Map<Integer, String> privilegeMap = new HashMap<>();
+
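+  // Lookup from bit position to privilege name. PrivilegeSynchronizer records a granted
+  // column privilege at index HiveResourceACLs.Privilege.ordinal(), and split_map_privs
+  // uses this map to translate those positions back into names.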
+  Map<Integer, String> getPrivilegeMap() {
+
+    privilegeMap.put(0, "SELECT");
+    privilegeMap.put(1, "UPDATE");
+    privilegeMap.put(2, "CREATE");
+    privilegeMap.put(3, "DROP");
+    privilegeMap.put(4, "ALTER");
+    privilegeMap.put(5, "INDEX");
+    privilegeMap.put(6, "LOCK");
+    privilegeMap.put(7, "READ");
+    privilegeMap.put(8, "WRITE");
+    privilegeMap.put(9, "ALL");
+
+    return privilegeMap;
+  }
+}
+
+/**
+ * UDFSplitMapPrivs.
+ * "_FUNC_(str) - Splits a privilege bit string and maps it to privilege type names"
+ * "Example: > SELECT _FUNC_('0 1 1 0 1 1 0 0 0') FROM src LIMIT 1;"
+ * output: " ["UPDATE", "CREATE", "ALTER", "INDEX"]"
+ */
+public class GenericUDFStringToPrivilege extends GenericUDF {
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+
+  private PrivilegeMap privsMap = new PrivilegeMap();
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkArgsSize(arguments, 1, 1);
+    checkArgPrimitive(arguments, 0);
+
+    converters[0] = ObjectInspectorConverters
+        .getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+
+    return ObjectInspectorFactory
+        .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+  }
+
+  @Override public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    assert (arguments.length == 1);
+
+    if (arguments[0].get() == null) {
+      return null;
+    }
+
+    Text s = (Text) converters[0].convert(arguments[0].get());
+    ArrayList<Text> result = new ArrayList<>();
+    int index = 0;
+    Map<Integer, String> privs = privsMap.getPrivilegeMap();
+
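+    // Walk the space-separated bit string; every "1" at position i contributes the
+    // privilege name that PrivilegeMap registers for index i.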
+    for (String str : s.toString().split(" ", -1)) {
+      if ("1".equals(str)) {
+        result.add(new Text(privs.get(index)));
+      }
+      index++;
+    }
+
+    return result;
+  }
+
+  @Override protected String getFuncName() {
+    return "split_map_privs";
+  }
+
+  @Override public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return getStandardDisplayString("split_map_privs", children);
+  }
+
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java
new file mode 100644
index 0000000000..03df2a5e22
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static java.util.Arrays.asList;
+import static org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import static org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+
+/**
+ *
+ * This is a test for the UDF GenericUDFStringToPrivilege.
+ *
+ */
+public class TestUDFSplitMapPrivs extends TestCase {
+  private final GenericUDFStringToPrivilege udf = new GenericUDFStringToPrivilege();
+  private final Object p0 = new Text("SELECT");
+  private final Object p1 = new Text("UPDATE");
+  private final Object p2 = new Text("CREATE");
+  private final Object p3 = new Text("DROP");
+  private final Object p4 = new Text("ALTER");
+  private final Object p5 = new Text("INDEX");
+  private final Object p6 = new Text("LOCK");
+  private final Object p7 = new Text("READ");
+  private final Object p8 = new Text("WRITE");
+  private final Object p9 = new Text("All");
+
+
+  @Test public void testBinaryStringSplitMapToPrivs() throws HiveException {
+
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] initArgs = {valueOI0};
+
+    udf.initialize(initArgs);
+
+    DeferredObject args;
+    DeferredObject[] evalArgs;
+
+    args = new DeferredJavaObject(new Text("1 0 0 0 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 0 0 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 0 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 1 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2, p3), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 1 1 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2, p3, p4), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 1 1 1 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2, p3, p4, p5), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 1 1 1 1 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2, p3, p4, p5, p6), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 1 1 1 1 1 1 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p1, p2, p3, p4, p5, p6, p7), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 0 1 1 1 1 1 1 1 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerify(asList(p0, p2, p3, p4, p5, p6, p7, p8), evalArgs);
+
+  }
+
+  @Test public void binaryStringMapingShouldFail() throws HiveException {
+
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] initArgs = {valueOI0};
+
+    udf.initialize(initArgs);
+    DeferredObject args;
+    DeferredObject[] evalArgs;
+
+    args = new DeferredJavaObject(new Text("1 0 0 0 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerifyNotTrue(asList(p1), evalArgs);
+
+    args = new DeferredJavaObject(new Text("1 1 0 0 0 0 0 0 0 0"));
+    evalArgs = new DeferredObject[] {args};
+    runAndVerifyNotTrue(asList(p0, p5), evalArgs);
+
+  }
+
+  private void runAndVerify(List expResult, DeferredObject[] evalArgs) throws HiveException {
+
+    ArrayList output = (ArrayList) udf.evaluate(evalArgs);
+    assertEquals(expResult, output);
+  }
+
+  private void runAndVerifyNotTrue(List expResult, DeferredObject[] evalArgs) throws HiveException {
+
+    ArrayList output = (ArrayList) udf.evaluate(evalArgs);
+    assertNotSame(expResult, output);
+  }
+
+}
diff --git a/ql/src/test/queries/clientpositive/split_map_privs.q b/ql/src/test/queries/clientpositive/split_map_privs.q
new file mode 100644
index 0000000000..afaefbe704
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/split_map_privs.q
@@ -0,0 +1,17 @@
+--! qt:dataset:src
+set hive.fetch.task.conversion=more;
+
+use default;
+DESCRIBE FUNCTION split_map_privs;
+DESCRIBE FUNCTION EXTENDED split_map_privs;
+
+EXPLAIN SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows);
+
+
+SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index 4b38cfb604..36c868d513 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -332,6 +332,7 @@ sort_array_by
 soundex
 space
 split
+split_map_privs
 sq_count_check
 sqrt
 stack
@@ -840,6 +841,7 @@ sort_array_by
 soundex
 space
 split
+split_map_privs
 sq_count_check
 sqrt
 stack
diff --git a/ql/src/test/results/clientpositive/llap/split_map_privs.q.out b/ql/src/test/results/clientpositive/llap/split_map_privs.q.out
new file mode 100644
index 0000000000..6206dfa384
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/split_map_privs.q.out
@@ -0,0 +1,63 @@
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: DESCRIBE FUNCTION split_map_privs
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION split_map_privs
+POSTHOOK: type: DESCFUNCTION
+There is no documentation for function 'split_map_privs'
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED split_map_privs
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED split_map_privs
+POSTHOOK: type: DESCFUNCTION
+There is no documentation for function 'split_map_privs'
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToPrivilege
+Function type:BUILTIN
+PREHOOK: query: EXPLAIN SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions: split_map_privs('1 0 0 0 0 0 0 0 0 0') (type: array<string>), split_map_privs('1 0 0 1 0 0 0 0 0 0') (type: array<string>)
+            outputColumnNames: _col0, _col1
+            ListSink
+
+PREHOOK: query: SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+  split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+["SELECT"]	["SELECT","DROP"]