diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index 671257a..8f81ef9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -399,10 +399,12 @@ protected static void checkPermissions(final FileSystem fs, final FileStatus sta
     }
     try {
       FileUtils.checkFileAccessWithImpersonation(fs, stat, checkActions, user);
-    } catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
-      // Older hadoop version will throw this @deprecated Exception.
-      throw accessControlException(ace);
     } catch (Exception err) {
+      // fs.permission.AccessControlException was removed by HADOOP-11356, but Hive users on
+      // older Hadoop versions may still see this exception, so we have to reference it by name.
+      if (err.getClass().getName().equals("org.apache.hadoop.fs.permission.AccessControlException")) {
+        throw accessControlException(err);
+      }
       throw new HiveException(err);
     }
   }
@@ -429,8 +431,7 @@ private AuthorizationException authorizationException(Exception e) {
     return new AuthorizationException(e);
   }
 
-  private static AccessControlException accessControlException(
-      org.apache.hadoop.fs.permission.AccessControlException e) {
+  private static AccessControlException accessControlException(Exception e) {
     AccessControlException ace = new AccessControlException(e.getMessage());
     ace.initCause(e);
     return ace;
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index bb4a190..87b79b3 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -965,8 +965,10 @@ private static Exception wrapAccessException(Exception err) {
     final int maxDepth = 20;
     Throwable curErr = err;
     for (int idx = 0; curErr != null && idx < maxDepth; ++idx) {
+      // fs.permission.AccessControlException was removed by HADOOP-11356, but Hive users on
+      // older Hadoop versions may still see this exception, so we have to reference it by name.
       if (curErr instanceof org.apache.hadoop.security.AccessControlException
-          || curErr instanceof org.apache.hadoop.fs.permission.AccessControlException) {
+          || curErr.getClass().getName().equals("org.apache.hadoop.fs.permission.AccessControlException")) {
         Exception newErr = new AccessControlException(curErr.getMessage());
         newErr.initCause(err);
         return newErr;
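
Both hunks apply the same fix. HADOOP-11356 removed org.apache.hadoop.fs.permission.AccessControlException, so Hive built against current Hadoop can no longer reference that class in source: the old typed catch clause and instanceof check would not compile, and an already-compiled reference would fail at runtime with NoClassDefFoundError when the class is absent. Clusters running older Hadoop can still throw the exception, though, so the patch matches it by its fully qualified class name, which requires no reference to the class itself. Below is a minimal, self-contained sketch of the pattern; the demo class and the helper isLegacyAccessControlException are hypothetical names for illustration, not part of the patch.

// Minimal sketch of the name-based exception check, using only the JDK.
// Nothing here references org.apache.hadoop.fs.permission.AccessControlException
// at compile time, so this code loads whether or not that class exists at runtime.
public class ExceptionNameCheckDemo {

  // Hypothetical helper mirroring the patched code: identify the legacy class by
  // its fully qualified name instead of with instanceof or a typed catch clause.
  static boolean isLegacyAccessControlException(Throwable t) {
    return "org.apache.hadoop.fs.permission.AccessControlException"
        .equals(t.getClass().getName());
  }

  public static void main(String[] args) {
    try {
      throw new RuntimeException("not an access-control failure");
    } catch (Exception err) {
      if (isLegacyAccessControlException(err)) {
        // The patch rewraps such exceptions via accessControlException(err).
        System.out.println("legacy AccessControlException detected");
      } else {
        System.out.println("other failure: " + err.getMessage());
      }
    }
  }
}

One trade-off worth noting: unlike instanceof, the name comparison matches only the exact class, never a subclass of the removed exception. That suits this patch, since the check targets one specific legacy type.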