diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index d607f61..1046ea4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -47,6 +47,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
@@ -470,10 +471,13 @@ public HiveTxnManager getTxnMgr() {
   public HadoopShims.HdfsEncryptionShim getHdfsEncryptionShim(FileSystem fs) throws HiveException {
     if (!hdfsEncryptionShims.containsKey(fs.getUri())) {
       try {
-        if ("hdfs".equals(fs.getUri().getScheme())) {
-          hdfsEncryptionShims.put(fs.getUri(), ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, sessionConf));
+        String scheme = fs.getUri().getScheme();
+        if ("hdfs".equals(scheme) || FsConstants.VIEWFS_SCHEME.equals(scheme)) {
+          hdfsEncryptionShims.put(fs.getUri(),
+              ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, sessionConf));
         } else {
-          LOG.info("Could not get hdfsEncryptionShim, it is only applicable to hdfs filesystem.");
+          LOG.info("Could not get hdfsEncryptionShim, "
+              + "it is only applicable to hdfs or viewfs filesystem.");
         }
       } catch (Exception e) {
         throw new HiveException(e);
diff --git ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
index fdebb94..4500597 100644
--- ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
+++ ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
@@ -30,6 +30,11 @@
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsConstants;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.viewfs.ConfigUtil;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -260,4 +265,19 @@ public void testReloadExistingAuxJars2() {
       }
     }
   }
+
+  @Test
+  public void testGetViewfsEncryptionShim() {
+    HiveConf conf = new HiveConf();
+    SessionState ss = new SessionState(conf);
+    ConfigUtil.addLink(conf, "/user", new Path("file:///","user").toUri());
+    try {
+      FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+      Assert.assertEquals(FsConstants.VIEWFS_SCHEME, fsView.getUri().getScheme());
+      Assert.assertNotNull(ss.getHdfsEncryptionShim(fsView));
+    } catch (HiveException | IOException e) {
+      Assert.fail(e.getMessage());
+    }
+  }
+
 }