encryptionZoneToCmrootMapping = new HashMap<>();
- private HadoopShims hadoopShims;
private static Configuration conf;
private String msUser;
private String msGroup;
@@ -161,7 +158,6 @@ private ReplChangeManager(Configuration conf) throws MetaException {
if (MetastoreConf.getBoolVar(conf, ConfVars.REPLCMENABLED)) {
ReplChangeManager.enabled = true;
ReplChangeManager.conf = conf;
- hadoopShims = ShimLoader.getHadoopShims();
cmRootDir = MetastoreConf.getVar(conf, ConfVars.REPLCMDIR);
encryptedCmRootDir = MetastoreConf.getVar(conf, ConfVars.REPLCMENCRYPTEDDIR);
fallbackNonEncryptedCmRootDir = MetastoreConf.getVar(conf, ConfVars.REPLCMFALLBACKNONENCRYPTEDDIR);
@@ -174,12 +170,10 @@ private ReplChangeManager(Configuration conf) throws MetaException {
Path cmroot = new Path(cmRootDir);
createCmRoot(cmroot);
FileSystem cmRootFs = cmroot.getFileSystem(conf);
- HdfsEncryptionShim pathEncryptionShim = hadoopShims
- .createHdfsEncryptionShim(cmRootFs, conf);
- if (pathEncryptionShim.isPathEncrypted(cmroot)) {
+ if (EncryptionFileUtils.isPathEncrypted(cmroot, conf)) {
//If cm root is encrypted we keep using it for the encryption zone
String encryptionZonePath = cmRootFs.getUri()
- + pathEncryptionShim.getEncryptionZoneForPath(cmroot).getPath();
+ + EncryptionFileUtils.getEncryptionZoneForPath(cmroot, conf).getPath();
encryptionZoneToCmrootMapping.put(encryptionZonePath, cmRootDir);
} else {
encryptionZoneToCmrootMapping.put(NO_ENCRYPTION, cmRootDir);
@@ -190,7 +184,7 @@ private ReplChangeManager(Configuration conf) throws MetaException {
throw new MetaException(ConfVars.REPLCMENCRYPTEDDIR.getHiveName() + " should be absolute path");
}
createCmRoot(cmRootFallback);
- if (pathEncryptionShim.isPathEncrypted(cmRootFallback)) {
+ if (EncryptionFileUtils.isPathEncrypted(cmRootFallback, conf)) {
throw new MetaException(ConfVars.REPLCMFALLBACKNONENCRYPTEDDIR.getHiveName()
+ " should not be encrypted");
}
@@ -566,10 +560,9 @@ Path getCmRoot(Path path) throws IOException {
String cmrootDir = fallbackNonEncryptedCmRootDir;
String encryptionZonePath = NO_ENCRYPTION;
if (enabled) {
- HdfsEncryptionShim pathEncryptionShim = hadoopShims.createHdfsEncryptionShim(path.getFileSystem(conf), conf);
- if (pathEncryptionShim.isPathEncrypted(path)) {
+ if (EncryptionFileUtils.isPathEncrypted(path, conf)) {
encryptionZonePath = path.getFileSystem(conf).getUri()
- + pathEncryptionShim.getEncryptionZoneForPath(path).getPath();
+ + EncryptionFileUtils.getEncryptionZoneForPath(path, conf).getPath();
//For encryption zone, create cm at the relative path specified by hive.repl.cm.encryptionzone.rootdir
//at the root of the encryption zone
cmrootDir = encryptionZonePath + Path.SEPARATOR + encryptedCmRootDir;
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/EncryptionFileUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/EncryptionFileUtils.java
new file mode 100644
index 0000000000..f2e4218736
--- /dev/null
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/EncryptionFileUtils.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.utils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.client.HdfsAdmin;
+import org.apache.hadoop.hdfs.protocol.EncryptionZone;
+
+import java.io.IOException;
+import java.net.URI;
+
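+/**
+ * Static utility methods, backed by {@link HdfsAdmin}, for checking whether a path lies in an
+ * HDFS encryption zone, resolving the zone for a path, and creating new encryption zones.
+ */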
+public class EncryptionFileUtils {
+
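+  /**
+   * Returns true if the given path is inside an HDFS encryption zone; paths on
+   * non-HDFS file systems always return false.
+   */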
+ public static boolean isPathEncrypted(Path path, Configuration conf) throws IOException {
+    // makeQualified resolves relative and scheme-less paths against their file system.
+    Path fullPath = path.getFileSystem(conf).makeQualified(path);
+    if (!"hdfs".equalsIgnoreCase(fullPath.toUri().getScheme())) {
+ return false;
+ }
+ return (EncryptionFileUtils.getEncryptionZoneForPath(fullPath, conf) != null);
+ }
+
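+  /**
+   * Returns the encryption zone containing the given path, or null if the path is not on HDFS
+   * or is not inside an encryption zone. If the path does not exist yet, its ancestors are
+   * checked until an existing path is found.
+   */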
+ public static EncryptionZone getEncryptionZoneForPath(Path path, Configuration conf) throws IOException {
+ URI uri = path.getFileSystem(conf).getUri();
+ if ("hdfs".equals(uri.getScheme())) {
+ HdfsAdmin hdfsAdmin = new HdfsAdmin(uri, conf);
+ if (path.getFileSystem(conf).exists(path)) {
+ return hdfsAdmin.getEncryptionZoneForPath(path);
+ } else if (!path.getParent().equals(path)) {
+ return getEncryptionZoneForPath(path.getParent(), conf);
+ } else {
+ return null;
+ }
+ }
+ return null;
+ }
+
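+  /**
+   * Creates an encryption zone rooted at the given HDFS path using the named encryption key;
+   * a no-op for non-HDFS file systems.
+   */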
+ public static void createEncryptionZone(Path path, String keyName, Configuration conf) throws IOException {
+ URI uri = path.getFileSystem(conf).getUri();
+ if ("hdfs".equals(uri.getScheme())) {
+ HdfsAdmin hdfsAdmin = new HdfsAdmin(uri, conf);
+ hdfsAdmin.createEncryptionZone(path, keyName);
+ }
+ }
+}