diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
index 974c74e..e1228cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
@@ -19,6 +19,8 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.Map;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -28,6 +30,8 @@
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * SecureCmdDoAs - Helper class for setting parameters and env necessary for
@@ -36,6 +40,9 @@
  *
  */
 public class SecureCmdDoAs {
+
+  private static final Logger LOG = LoggerFactory.getLogger(SecureCmdDoAs.class);
+
   private final Path tokenPath;
   private final File tokenFile;
 
@@ -45,7 +52,18 @@
   public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
     String uname = UserGroupInformation.getLoginUser().getShortUserName();
     FileSystem fs = FileSystem.get(conf);
     Credentials cred = new Credentials();
-    ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname);
+
+    ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
+    for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
+      try {
+        ShimLoader.getHadoopShims().addDelegationTokens(
+            FileSystem.get(new URI(uri), conf),
+            cred, uname);
+      } catch (URISyntaxException e) {
+        LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:[" + uri + "], ignoring.", e);
+      }
+    }
+
     tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
     tokenPath = new Path(tokenFile.toURI());
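
For context: the patch makes SecureCmdDoAs collect HDFS delegation tokens not only for the default FileSystem but also for every additional HDFS server listed in the "mapreduce.job.hdfs-servers" property, logging and skipping any malformed URI instead of failing the whole command. Below is a minimal standalone sketch of the same pattern using only stock Hadoop APIs (FileSystem#addDelegationTokens and Configuration#getStringCollection) rather than Hive's shim layer; the class name, the example URIs, and the "hive" renewer are illustrative assumptions, not taken from the patch.

    import java.io.IOException;
    import java.net.URI;
    import java.net.URISyntaxException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.Credentials;

    public class MultiNameNodeTokenSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Comma-separated list of extra NameNodes a job may touch;
        // Configuration#getStringCollection splits on commas.
        // These URIs are hypothetical.
        conf.set("mapreduce.job.hdfs-servers", "hdfs://nn1:8020,hdfs://nn2:8020");

        Credentials cred = new Credentials();
        String renewer = "hive"; // hypothetical renewer name

        // Ask the default filesystem first, mirroring the patched constructor.
        FileSystem.get(conf).addDelegationTokens(renewer, cred);

        // Then one request per configured server; only the URI-parsing error
        // is swallowed, matching the patch's catch of URISyntaxException.
        for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
          try {
            FileSystem.get(new URI(uri), conf).addDelegationTokens(renewer, cred);
          } catch (URISyntaxException e) {
            System.err.println("Invalid URI in mapreduce.job.hdfs-servers: " + uri);
          }
        }
      }
    }

Note that in both the patch and this sketch, an IOException from contacting a well-formed but unreachable server still propagates: only URISyntaxException is caught, so a typo in the server list degrades to a warning while an unreachable NameNode remains a hard failure.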