From 4575facb1de2c4b4bbeb56647d70b9bff73c3c5f Mon Sep 17 00:00:00 2001 From: Ashutosh Chauhan Date: Tue, 3 Oct 2017 17:52:02 -0700 Subject: [PATCH] HIVE-15016 : Run tests with Hadoop 3.0.0-alpha1 --- common/pom.xml | 16 ++++++++++++++++ .../java/org/apache/hive/hcatalog/common/HCatUtil.java | 2 +- .../apache/hive/hcatalog/templeton/TestWebHCatE2e.java | 4 ++-- .../hadoop/hive/ql/txn/compactor/TestCompactor.java | 6 +++--- llap-server/pom.xml | 12 ++++++++++++ metastore/pom.xml | 2 +- pom.xml | 12 +++++++++++- serde/pom.xml | 6 ++++++ shims/0.23/pom.xml | 14 +++++++++++++- .../org/apache/hadoop/hive/shims/Hadoop23Shims.java | 10 +++++----- .../java/org/apache/hadoop/fs/ProxyFileSystem.java | 5 +++++ shims/scheduler/pom.xml | 8 +++++++- standalone-metastore/pom.xml | 18 +++++++++++++++++- .../apache/hadoop/hive/metastore/utils/HdfsUtils.java | 10 +++++----- 14 files changed, 104 insertions(+), 21 deletions(-) diff --git a/common/pom.xml b/common/pom.xml index fb80db7706..4f0398663d 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -152,6 +152,14 @@ commmons-logging commons-logging + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + @@ -176,6 +184,14 @@ commmons-logging commons-logging + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java index 81804cfb2e..f83bf9a31f 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java @@ -739,7 +739,7 @@ public static void copyJobPropertiesToJobConf( public static boolean isHadoop23() { String version = org.apache.hadoop.util.VersionInfo.getVersion(); - if (version.matches("\\b0\\.23\\..+\\b")||version.matches("\\b2\\..*")) + if (version.matches("\\b2\\..*")||version.matches("\\b3\\..*")) return true; return false; } diff --git 
a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java index 22d2cc6bff..341314b218 100644 --- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java +++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java @@ -238,7 +238,7 @@ public void getHadoopVersion() throws Exception { Map props = JsonBuilder.jsonToMap(p.responseBody); Assert.assertEquals("hadoop", props.get("module")); Assert.assertTrue(p.getAssertMsg(), - ((String)props.get("version")).matches("[1-2].[0-9]+.[0-9]+.*")); + ((String)props.get("version")).matches("[1-3].[0-9]+.[0-9]+.*")); } @Test @@ -356,4 +356,4 @@ private static MethodCallRetVal doHttpCall(String uri, HTTP_METHOD_TYPE type, Ma } return new MethodCallRetVal(-1, "Http " + type + " failed; see log file for details", actualUri, method.getName()); } -} \ No newline at end of file +} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java index aea1dfc6f4..1dc4721dba 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java @@ -1207,7 +1207,7 @@ public void testTableProperties() throws Exception { t.init(stop, looped); t.run(); JobConf job = t.getMrJob(); - Assert.assertEquals("2048", job.get("mapreduce.map.memory.mb")); // 2048 comes from tblproperties + Assert.assertEquals(2048, job.getMemoryForMapTask()); // 2048 comes from tblproperties // Compact ttp1 stop = new AtomicBoolean(true); t = new Worker(); @@ -1217,7 +1217,7 @@ public void testTableProperties() throws Exception { t.init(stop, looped); t.run(); job = t.getMrJob(); - Assert.assertEquals("1024", 
job.get("mapreduce.map.memory.mb")); // 1024 is the default value + Assert.assertEquals(1024, job.getMemoryForMapTask()); // 1024 is the default value // Clean up runCleaner(conf); rsp = txnHandler.showCompact(new ShowCompactRequest()); @@ -1269,7 +1269,7 @@ public void testTableProperties() throws Exception { t.init(stop, looped); t.run(); job = t.getMrJob(); - Assert.assertEquals("3072", job.get("mapreduce.map.memory.mb")); + Assert.assertEquals(3072, job.getMemoryForMapTask()); Assert.assertTrue(job.get("hive.compactor.table.props").contains("orc.compress.size4:8192")); } diff --git a/llap-server/pom.xml b/llap-server/pom.xml index 47a04cc310..39381434ab 100644 --- a/llap-server/pom.xml +++ b/llap-server/pom.xml @@ -122,6 +122,14 @@ true + org.apache.hadoop + hadoop-yarn-client + + + org.apache.hadoop + hadoop-yarn-api + + org.slf4j slf4j-log4j12 @@ -189,6 +197,10 @@ hadoop-yarn-client + org.apache.hadoop + hadoop-yarn-api + + org.apache.hadoop hadoop-yarn-server-web-proxy diff --git a/metastore/pom.xml b/metastore/pom.xml index 5430580749..9e58e63277 100644 --- a/metastore/pom.xml +++ b/metastore/pom.xml @@ -155,7 +155,7 @@ org.apache.hadoop - hadoop-hdfs + hadoop-hdfs-client ${hadoop.version} true diff --git a/pom.xml b/pom.xml index 52e53012b9..999b52a355 100644 --- a/pom.xml +++ b/pom.xml @@ -142,7 +142,7 @@ 14.0.1 2.4.11 1.3.166 - 2.8.1 + 3.0.0-beta1 ${basedir}/${hive.path.to.root}/testutils/hadoop 1.3 1.1.1 @@ -717,6 +717,10 @@ ${hadoop.version} + com.codahale.metrics + metrics-core + + org.slf4j slf4j-log4j12 @@ -828,6 +832,12 @@ org.apache.hbase hbase-hadoop-compat ${hbase.version} + + + com.codahale.metrics + metrics-core + + org.apache.hbase diff --git a/serde/pom.xml b/serde/pom.xml index 7419cfb17a..3cd3e161bd 100644 --- a/serde/pom.xml +++ b/serde/pom.xml @@ -150,6 +150,12 @@ + + org.apache.hadoop + hadoop-hdfs-client + ${hadoop.version} + test + org.apache.hadoop hadoop-hdfs diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml index 
3ff1d38776..002fb8c3db 100644 --- a/shims/0.23/pom.xml +++ b/shims/0.23/pom.xml @@ -62,12 +62,18 @@ - + org.apache.hadoop hadoop-hdfs ${hadoop.version} true + + org.apache.hadoop + hadoop-hdfs-client + ${hadoop.version} + true + org.apache.hadoop hadoop-hdfs @@ -198,6 +204,12 @@ ${hadoop.version} true test-jar + + + com.codahale.metrics + metrics-core + + org.apache.hadoop diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java index e9445eb11d..02f0ce8688 100644 --- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java +++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java @@ -1128,17 +1128,17 @@ public Boolean run() throws Exception { @Override public boolean runDistCp(List srcPaths, Path dst, Configuration conf) throws IOException { - DistCpOptions options = new DistCpOptions(srcPaths, dst); - options.setSyncFolder(true); - options.setSkipCRC(true); - options.preserve(FileAttribute.BLOCKSIZE); + DistCpOptions.Builder builder = new DistCpOptions.Builder(srcPaths, dst); + builder.withSyncFolder(true); + builder.withCRC(true); + builder.preserve(FileAttribute.BLOCKSIZE); // Creates the command-line parameters for distcp List params = constructDistCpParams(srcPaths, dst, conf); try { conf.setBoolean("mapred.mapper.new-api", true); - DistCp distcp = new DistCp(conf, options); + DistCp distcp = new DistCp(conf, builder.build()); // HIVE-13704 states that we should use run() instead of execute() due to a hadoop known issue // added by HADOOP-10459 diff --git a/shims/common/src/main/java/org/apache/hadoop/fs/ProxyFileSystem.java b/shims/common/src/main/java/org/apache/hadoop/fs/ProxyFileSystem.java index 2c37a51cf4..ba4959cc91 100644 --- a/shims/common/src/main/java/org/apache/hadoop/fs/ProxyFileSystem.java +++ b/shims/common/src/main/java/org/apache/hadoop/fs/ProxyFileSystem.java @@ -264,6 +264,11 @@ public ContentSummary 
getContentSummary(Path f) throws IOException { } @Override + public FileStatus getFileLinkStatus(Path f) throws IOException { + return swizzleFileStatus(super.getFileLinkStatus(swizzleParamPath(f)), false); + } + + @Override public FileStatus getFileStatus(Path f) throws IOException { return swizzleFileStatus(super.getFileStatus(swizzleParamPath(f)), false); } diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml index 0eadb69435..a5809fa70b 100644 --- a/shims/scheduler/pom.xml +++ b/shims/scheduler/pom.xml @@ -87,8 +87,14 @@ org.apache.hadoop hadoop-yarn-server-tests ${hadoop.version} - true + true test-jar + + + com.codahale.metrics + metrics-core + + diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml index d91b22de50..47eaec6f77 100644 --- a/standalone-metastore/pom.xml +++ b/standalone-metastore/pom.xml @@ -89,7 +89,7 @@ ${hadoop.version} provided - + org.apache.hadoop hadoop-hdfs ${hadoop.version} @@ -105,6 +105,22 @@ + + org.apache.hadoop + hadoop-hdfs-client + ${hadoop.version} + true + + + org.slf4j + slf4j-log4j12 + + + commmons-logging + commons-logging + + + org.apache.hive diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java index c10e36f94a..8930e2212e 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java @@ -147,17 +147,17 @@ public Boolean run() throws Exception { public static boolean runDistCp(List srcPaths, Path dst, Configuration conf) throws IOException { - DistCpOptions options = new DistCpOptions(srcPaths, dst); - options.setSyncFolder(true); - options.setSkipCRC(true); - options.preserve(DistCpOptions.FileAttribute.BLOCKSIZE); + DistCpOptions.Builder builder = new DistCpOptions.Builder(srcPaths, dst); + builder.withSyncFolder(true); + 
builder.withCRC(true); + builder.preserve(DistCpOptions.FileAttribute.BLOCKSIZE); // Creates the command-line parameters for distcp List params = constructDistCpParams(srcPaths, dst, conf); try { conf.setBoolean("mapred.mapper.new-api", true); - DistCp distcp = new DistCp(conf, options); + DistCp distcp = new DistCp(conf, builder.build()); // HIVE-13704 states that we should use run() instead of execute() due to a hadoop known issue // added by HADOOP-10459 -- 2.13.5 (Apple Git-94)