diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 81323f6..0453bcd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -43,6 +43,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -141,6 +142,10 @@ public synchronized void remove() { } }; + public static Hive get(Configuration c, Class<?> clazz) throws HiveException { + return get(c instanceof HiveConf ? (HiveConf)c : new HiveConf(c, clazz)); + } + /** * Gets hive object for the current thread. If one is not initialized then a * new one is created If the new configuration is different in metadata conf diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java index 2fa512c..f26d1a2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.security.authorization; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -27,7 +26,7 @@ BitSetCheckedAuthorizationProvider { public void init(Configuration conf) throws HiveException { - hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class))); + hive_db = new HiveProxy(Hive.get(conf, HiveAuthorizationProvider.class)); } } diff --git 
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java index 0dfd997..f803cc4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java @@ -35,7 +35,6 @@ import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.Database; @@ -83,7 +82,7 @@ private void initWh() throws MetaException, HiveException { // till we explicitly initialize it as being from the client side. So, we have a // chicken-and-egg problem. So, we now track whether or not we're running from client-side // in the SBAP itself. - hive_db = new HiveProxy(Hive.get(new HiveConf(getConf(), StorageBasedAuthorizationProvider.class))); + hive_db = new HiveProxy(Hive.get(getConf(), StorageBasedAuthorizationProvider.class)); this.wh = new Warehouse(getConf()); if (this.wh == null){ // If wh is still null after just having initialized it, bail out - something's very wrong. @@ -117,7 +116,7 @@ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPri // Update to previous comment: there does seem to be one place that uses this // and that is to authorize "show databases" in hcat commandline, which is used - // by webhcat. And user-level auth seems to be a resonable default in this case. + // by webhcat. And user-level auth seems to be a reasonable default in this case. 
// The now deprecated HdfsAuthorizationProvider in hcatalog approached this in // another way, and that was to see if the user had said above appropriate requested // privileges for the hive root warehouse directory. That seems to be the best