diff --git a/shims/0.20S/pom.xml b/shims/0.20S/pom.xml
index 040136b..7d41d28 100644
--- a/shims/0.20S/pom.xml
+++ b/shims/0.20S/pom.xml
@@ -54,4 +54,16 @@
      <optional>true</optional>
    </dependency>
  </dependencies>
+  <profiles>
+    <profile>
+      <id>hadoop-1</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-tools</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
</project>
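
The hadoop-1 profile adds hadoop-tools to the compile classpath, which is where the backported distcp2 classes are expected to live, so the 0.20S shim can import DistCp directly instead of loading it reflectively. A hypothetical smoke check (class name is illustrative) that fails the build if the dependency is missing:

```java
// Hypothetical compile-time check: these imports only resolve when the
// hadoop-1 profile puts hadoop-tools on the classpath.
import org.apache.hadoop.tools.distcp2.DistCp;
import org.apache.hadoop.tools.distcp2.DistCpOptions;

public class DistCp2ClasspathCheck {
  public static void main(String[] args) {
    // Referencing the classes is enough to prove they are resolvable.
    System.out.println(DistCp.class.getName());
    System.out.println(DistCpOptions.class.getName());
  }
}
```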
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
index a56309f..6c6ccbc 100644
--- a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -19,7 +19,6 @@
import java.io.IOException;
import java.lang.Override;
-import java.lang.reflect.Constructor;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URI;
@@ -27,6 +26,7 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -71,6 +71,8 @@
import org.apache.hadoop.security.KerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.distcp2.DistCp;
+import org.apache.hadoop.tools.distcp2.DistCpOptions;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.VersionInfo;
@@ -503,7 +505,7 @@ public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus,
}
public class Hadoop20SFileStatus implements HdfsFileStatus {
- private FileStatus fileStatus;
+ private final FileStatus fileStatus;
public Hadoop20SFileStatus(FileStatus fileStatus) {
this.fileStatus = fileStatus;
}
@@ -629,28 +631,33 @@ public KerberosNameShim getKerberosNameShim(String name) throws IOException {
*/
public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
- private KerberosName kerberosName;
+ private final KerberosName kerberosName;
public KerberosNameShim(String name) {
kerberosName = new KerberosName(name);
}
+ @Override
public String getDefaultRealm() {
return kerberosName.getDefaultRealm();
}
+ @Override
public String getServiceName() {
return kerberosName.getServiceName();
}
+ @Override
public String getHostName() {
return kerberosName.getHostName();
}
+ @Override
public String getRealm() {
return kerberosName.getRealm();
}
+ @Override
public String getShortName() throws IOException {
return kerberosName.getShortName();
}
@@ -663,27 +670,17 @@ public StoragePolicyShim getStoragePolicyShim(FileSystem fs) {
@Override
public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
- int rc;
-
- // Creates the command-line parameters for distcp
- String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()};
+ DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
+ options.setSkipCRC(true);
+ options.setSyncFolder(true);
try {
- Class clazzDistCp = Class.forName("org.apache.hadoop.tools.distcp2.DistCp");
- Constructor c = clazzDistCp.getConstructor();
- c.setAccessible(true);
- Tool distcp = (Tool)c.newInstance();
- distcp.setConf(conf);
- rc = distcp.run(params);
- } catch (ClassNotFoundException e) {
- throw new IOException("Cannot find DistCp class package: " + e.getMessage());
- } catch (NoSuchMethodException e) {
- throw new IOException("Cannot get DistCp constructor: " + e.getMessage());
+ DistCp distcp = new DistCp(conf, options);
+ distcp.execute();
+ return true;
} catch (Exception e) {
throw new IOException("Cannot execute DistCp process: " + e, e);
}
-
- return (0 == rc) ? true : false;
}
@Override
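
The rewrite above trades the reflective, CLI-style invocation ("-update", "-skipcrccheck") for distcp2's typed options, and execute() throws on failure, so there is no exit code to translate. A minimal standalone sketch, assuming the distcp2 backport is on the classpath (paths are illustrative):

```java
import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.distcp2.DistCp;
import org.apache.hadoop.tools.distcp2.DistCpOptions;

public class DistCpSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path src = new Path("hdfs:///tmp/export/src"); // illustrative
    Path dst = new Path("hdfs:///tmp/export/dst"); // illustrative

    DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
    options.setSkipCRC(true);    // replaces the old "-skipcrccheck" flag
    options.setSyncFolder(true); // replaces the old "-update" flag

    // execute() submits the copy job and waits, throwing on any failure.
    new DistCp(conf, options).execute();
  }
}
```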
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index e5be8d6..29d0f13 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -19,7 +19,6 @@
import java.io.FileNotFoundException;
import java.io.IOException;
-import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
@@ -28,12 +27,12 @@
import java.security.AccessControlException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.lang.StringUtils;
@@ -64,7 +63,6 @@
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
@@ -93,8 +91,9 @@
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.DistCp;
+import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.Tool;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.tez.test.MiniTezCluster;
@@ -121,7 +120,7 @@ public Hadoop23Shims() {
zcr = true;
} catch (ClassNotFoundException ce) {
}
-
+
if (zcr) {
// in-memory HDFS is only available after zcr
try {
@@ -576,11 +575,11 @@ public TaskAttemptID createTaskAttemptID() {
org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
try {
- java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(
+ java.lang.reflect.Constructor<org.apache.hadoop.mapred.TaskAttemptContextImpl> construct = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(
org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
Reporter.class);
construct.setAccessible(true);
- newContext = (org.apache.hadoop.mapred.TaskAttemptContext) construct.newInstance(
+ newContext = construct.newInstance(
new JobConf(conf), taskId, progressable);
} catch (Exception e) {
throw new RuntimeException(e);
@@ -913,6 +912,7 @@ public LocatedFileStatus next() throws IOException {
* Cannot add Override annotation since FileSystem.access() may not exist in
* the version of hadoop used to build Hive.
*/
+ @Override
public void access(Path path, FsAction action) throws AccessControlException,
FileNotFoundException, IOException {
Path underlyingFsPath = swizzleParamPath(path);
@@ -1179,7 +1179,7 @@ public void setStoragePolicy(Path path, StoragePolicyValue policy)
}
}
}
-
+
@Override
public HadoopShims.StoragePolicyShim getStoragePolicyShim(FileSystem fs) {
@@ -1195,27 +1195,17 @@ public void setStoragePolicy(Path path, StoragePolicyValue policy)
@Override
public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
- int rc;
-
- // Creates the command-line parameters for distcp
- String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()};
+ DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
+ options.setSkipCRC(true);
+ options.setSyncFolder(true);
try {
- Class clazzDistCp = Class.forName("org.apache.hadoop.tools.DistCp");
- Constructor c = clazzDistCp.getConstructor();
- c.setAccessible(true);
- Tool distcp = (Tool)c.newInstance();
- distcp.setConf(conf);
- rc = distcp.run(params);
- } catch (ClassNotFoundException e) {
- throw new IOException("Cannot find DistCp class package: " + e.getMessage());
- } catch (NoSuchMethodException e) {
- throw new IOException("Cannot get DistCp constructor: " + e.getMessage());
+ DistCp distcp = new DistCp(conf, options);
+ distcp.execute();
+ return true;
} catch (Exception e) {
throw new IOException("Cannot execute DistCp process: " + e, e);
}
-
- return (0 == rc);
}
private static Boolean hdfsEncryptionSupport;
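
From the caller's side the contract tightens: runDistCp now returns true or throws IOException, never false. A sketch of driving it through the shim layer (paths are illustrative):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;

public class RunDistCpCaller {
  public static void main(String[] args) throws Exception {
    HadoopShims shims = ShimLoader.getHadoopShims();
    // Failures now surface as IOException from the shim rather than as a
    // false return translated from DistCp's exit code.
    boolean ok = shims.runDistCp(
        new Path("hdfs:///tmp/export/src"), // illustrative
        new Path("hdfs:///tmp/export/dst"), // illustrative
        new Configuration());
    System.out.println("distcp succeeded: " + ok);
  }
}
```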
@@ -1250,7 +1240,7 @@ public static boolean isHdfsEncryptionSupported() {
*/
private KeyProvider keyProvider = null;
- private Configuration conf;
+ private final Configuration conf;
public HdfsEncryptionShim(URI uri, Configuration conf) throws IOException {
DistributedFileSystem dfs = (DistributedFileSystem)FileSystem.get(uri, conf);
@@ -1409,6 +1399,7 @@ public int readByteBuffer(FSDataInputStream file, ByteBuffer dest) throws IOExce
}
return result;
}
+ @Override
public void addDelegationTokens(FileSystem fs, Credentials cred, String uname) throws IOException {
// Use method addDelegationTokens instead of getDelegationToken to get all the tokens including KMS.
fs.addDelegationTokens(uname, cred);
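
As the inline comment notes, FileSystem.addDelegationTokens() gathers every required token in one call, including the KMS token for encrypted paths, where getDelegationToken() would return only the HDFS one. A short sketch (renewer name and path are illustrative):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;

public class TokenSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = new Path("hdfs:///tmp").getFileSystem(conf); // illustrative
    Credentials cred = new Credentials();
    // One call collects HDFS and, on KMS-enabled clusters, KMS tokens.
    fs.addDelegationTokens("hive", cred);
    System.out.println("tokens collected: " + cred.numberOfTokens());
  }
}
```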