From 0a70b0a7294e9960c0559fac50d1a0bb531568ce Mon Sep 17 00:00:00 2001
From: wellington
Date: Mon, 13 Jun 2016 11:33:19 +0100
Subject: [PATCH] HBASE-15860.master.002

---
 .../org/apache/hadoop/hbase/util/FSHDFSUtils.java | 55 +++++-----------
 .../apache/hadoop/hbase/util/TestFSHDFSUtils.java | 17 +++++++
 2 files changed, 28 insertions(+), 44 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
index 0d880d0..8783672 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
@@ -27,19 +27,20 @@ import java.net.URI;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
-import java.util.Collection;
 
-import com.google.common.collect.Sets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 
+import com.google.common.collect.Sets;
+
 /**
  * Implementation for hdfs
@@ -48,8 +49,6 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 @InterfaceStability.Evolving
 public class FSHDFSUtils extends FSUtils {
   private static final Log LOG = LogFactory.getLog(FSHDFSUtils.class);
-  private static Class dfsUtilClazz;
-  private static Method getNNAddressesMethod;
 
   /**
    * @param fs
@@ -63,33 +62,12 @@ public class FSHDFSUtils extends FSUtils {
 
     if (serviceName.startsWith("ha-hdfs")) {
       try {
-        if (dfsUtilClazz == null) {
-          dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
-        }
-        if (getNNAddressesMethod == null) {
-          try {
-            // getNNServiceRpcAddressesForCluster is available only in version
-            // equal to or later than Hadoop 2.6
-            getNNAddressesMethod =
-                dfsUtilClazz.getMethod("getNNServiceRpcAddressesForCluster", Configuration.class);
-          } catch (NoSuchMethodException e) {
-            // If hadoop version is older than hadoop 2.6
-            getNNAddressesMethod =
-                dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
-          }
-
-        }
-
-        Map<String, Map<String, InetSocketAddress>> addressMap =
-            (Map<String, Map<String, InetSocketAddress>>) getNNAddressesMethod
-                .invoke(null, conf);
-        String nameService = serviceName.substring(serviceName.indexOf(":") + 1);
-        if (addressMap.containsKey(nameService)) {
-          Map<String, InetSocketAddress> nnMap = addressMap.get(nameService);
-          for (Map.Entry<String, InetSocketAddress> e2 : nnMap.entrySet()) {
-            InetSocketAddress addr = e2.getValue();
-            addresses.add(addr);
-          }
-        }
+        String nameServiceId = serviceName.split(":")[1];
+        Map<String, InetSocketAddress> nnMap =
+            DFSUtil.getRpcAddressesForNameserviceId(conf, nameServiceId, null);
+        for (Map.Entry<String, InetSocketAddress> e2 : nnMap.entrySet()) {
+          InetSocketAddress addr = e2.getValue();
+          addresses.add(addr);
+        }
       } catch (Exception e) {
         LOG.warn("DFSUtil.getNNServiceRpcAddresses failed. serviceName=" + serviceName, e);
@@ -126,17 +104,6 @@ public class FSHDFSUtils extends FSUtils {
     if (srcServiceName.equals(desServiceName)) {
       return true;
     }
-    if (srcServiceName.startsWith("ha-hdfs") && desServiceName.startsWith("ha-hdfs")) {
-      Collection<String> internalNameServices =
-          conf.getTrimmedStringCollection("dfs.internal.nameservices");
-      if (!internalNameServices.isEmpty()) {
-        if (internalNameServices.contains(srcServiceName.split(":")[1])) {
-          return true;
-        } else {
-          return false;
-        }
-      }
-    }
     if (srcFs instanceof DistributedFileSystem && desFs instanceof DistributedFileSystem) {
       //If one serviceName is an HA format while the other is a non-HA format,
       // maybe they refer to the same FileSystem.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
index ea19ea7..d5369bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
@@ -143,6 +143,23 @@ public class TestFSHDFSUtils {
     desPath = new Path("/");
     desFs = desPath.getFileSystem(conf);
     assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    conf.set("dfs.nameservices", "haosong-hadoop,ns-2");
+    conf.set("dfs.ha.namenodes.ns-2", "nn1,nn2");
+    conf.set("dfs.client.failover.proxy.provider.ns-2",
+        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
+    conf.set("dfs.namenode.rpc-address.ns-2.nn1", "127.0.0.10:8020");
+    conf.set("dfs.namenode.rpc-address.ns-2.nn2", "127.10.2.10:8000");
+
+    srcPath = new Path("hdfs://haosong-hadoop/src");
+    srcFs = srcPath.getFileSystem(conf);
+    desPath = new Path("hdfs://ns-2/");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
+
+    desPath = new Path("hdfs://haosong-hadoop/other");
+    desFs = desPath.getFileSystem(conf);
+    assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
   }
 
   /**
-- 
2.4.9 (Apple Git-60)