diff --git accumulo-handler/pom.xml accumulo-handler/pom.xml
index 4e3a087..a330e94 100644
--- accumulo-handler/pom.xml
+++ accumulo-handler/pom.xml
@@ -91,6 +91,24 @@
slf4j-api
+ org.apache.hadoop
+ hadoop-client
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
junit
junit
test
@@ -102,49 +120,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-client
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-client
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git beeline/pom.xml beeline/pom.xml
index d46ac3c..391d589 100644
--- beeline/pom.xml
+++ beeline/pom.xml
@@ -81,6 +81,12 @@
${jline.version}
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
org.apache.thrift
libthrift
${libthrift.version}
@@ -106,6 +112,12 @@
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
junit
junit
${junit.version}
@@ -119,38 +131,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git cli/pom.xml cli/pom.xml
index 88a815c..a2b9551 100644
--- cli/pom.xml
+++ cli/pom.xml
@@ -96,8 +96,26 @@
libthrift
${libthrift.version}
-
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+
+
junit
junit
${junit.version}
@@ -111,49 +129,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git common/pom.xml common/pom.xml
index dba814d..1ab4c57 100644
--- common/pom.xml
+++ common/pom.xml
@@ -95,6 +95,18 @@
ant
${ant.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
com.google.code.tempus-fugit
@@ -137,35 +149,6 @@
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
-
-
-
dist
diff --git contrib/pom.xml contrib/pom.xml
index 8f2ffe4..51602d4 100644
--- contrib/pom.xml
+++ contrib/pom.xml
@@ -60,6 +60,19 @@
commons-logging
${commons-logging.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
junit
@@ -69,37 +82,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git hbase-handler/pom.xml hbase-handler/pom.xml
index a4a9752..a6801eb 100644
--- hbase-handler/pom.xml
+++ hbase-handler/pom.xml
@@ -50,8 +50,93 @@
commons-logging
${commons-logging.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ tests
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test-jar
+ test
+
+
+ com.sun.jersey
+ jersey-servlet
+ ${jersey.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
junit
junit
${junit.version}
@@ -64,167 +149,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test-jar
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- tests
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- com.sun.jersey
- jersey-servlet
- ${jersey.version}
- test
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git hcatalog/core/pom.xml hcatalog/core/pom.xml
index 58ddeee..70297bf 100644
--- hcatalog/core/pom.xml
+++ hcatalog/core/pom.xml
@@ -78,123 +78,87 @@
jackson-mapper-asl
${jackson.version}
+
+ org.apache.hadoop
+ hadoop-annotations
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+
+ com.sun.jersey
+ jersey-servlet
+ ${jersey.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-hs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-tests
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.pig
+ pig
+ ${pig.version}
+ h2
+ test
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
-
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-annotations
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
-
-
-
- com.sun.jersey
- jersey-servlet
- ${jersey.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- h2
- test
-
-
-
-
-
diff --git hcatalog/hcatalog-pig-adapter/pom.xml hcatalog/hcatalog-pig-adapter/pom.xml
index ca9039a..fa02a36 100644
--- hcatalog/hcatalog-pig-adapter/pom.xml
+++ hcatalog/hcatalog-pig-adapter/pom.xml
@@ -45,6 +45,42 @@
hive-hcatalog-core
${project.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.apache.pig
+ pig
+ ${pig.version}
+ h2
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+
+ joda-time
+ joda-time
+ 2.2
+
+
org.apache.hive.hcatalog
@@ -66,103 +102,33 @@
test-jar
test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-common
+ ${hadoop.version}
+ true
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ com.sun.jersey
+ jersey-servlet
+ test
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
-
-
-
- joda-time
- joda-time
- 2.2
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
- org.apache.pig
- pig
- ${pig.version}
- h2
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
-
- joda-time
- joda-time
- 2.2
-
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
- ${hadoop-23.version}
- true
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- com.sun.jersey
- jersey-servlet
- test
-
-
-
-
-
diff --git hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index df3b72a..676a4ed 100644
--- hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -18,22 +18,7 @@
*/
package org.apache.hive.hcatalog.pig;
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
import org.apache.commons.io.FileUtils;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -54,7 +39,6 @@
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
-
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
@@ -66,22 +50,35 @@
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.Pair;
-
import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
-
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.junit.Assert.*;
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
@RunWith(Parameterized.class)
@@ -100,9 +97,6 @@
private HadoopShims.MiniDFSShim dfs = null;
private HadoopShims.HdfsEncryptionShim hes = null;
private final String[] testOnlyCommands = new String[]{"crypto"};
- private final String[] encryptionUnsupportedHadoopVersion = new String[]{ShimLoader
- .HADOOP20SVERSIONNAME};
- private boolean isEncryptionTestEnabled = true;
private Driver driver;
private Map> basicInputData;
private static List readRecords = new ArrayList();
@@ -196,7 +190,6 @@ public void setup() throws Exception {
driver = new Driver(hiveConf);
- checkShimLoaderVersion();
initEncryptionShim(hiveConf);
String encryptedTablePath = TEST_WAREHOUSE_DIR + "/encryptedTable";
SessionState.start(new CliSessionState(hiveConf));
@@ -231,19 +224,7 @@ public void setup() throws Exception {
server.executeBatch();
}
- void checkShimLoaderVersion() {
- for (String v : encryptionUnsupportedHadoopVersion) {
- if (ShimLoader.getMajorVersion().equals(v)) {
- isEncryptionTestEnabled = false;
- return;
- }
- }
- }
-
void initEncryptionShim(HiveConf conf) throws IOException {
- if (!isEncryptionTestEnabled) {
- return;
- }
FileSystem fs;
HadoopShims shims = ShimLoader.getHadoopShims();
conf.set(SECURITY_KEY_PROVIDER_URI_NAME, getKeyProviderURI());
@@ -268,9 +249,6 @@ public static String ensurePathEndsInSlash(String path) {
}
private void associateEncryptionZoneWithPath(String path) throws SQLException, CommandNeedRetryException {
- if (!isEncryptionTestEnabled) {
- return;
- }
LOG.info(this.storageFormat + ": associateEncryptionZoneWithPath");
assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
enableTestOnlyCmd(SessionState.get().getConf());
@@ -289,9 +267,6 @@ private void checkExecutionResponse(CommandProcessorResponse response) {
}
private void removeEncryptionZone() throws SQLException, CommandNeedRetryException {
- if (!isEncryptionTestEnabled) {
- return;
- }
LOG.info(this.storageFormat + ": removeEncryptionZone");
enableTestOnlyCmd(SessionState.get().getConf());
CommandProcessor crypto = getTestCommand("crypto");
@@ -333,7 +308,6 @@ private String getKeyProviderURI() {
@Test
public void testReadDataFromEncryptedHiveTableByPig() throws IOException {
- assumeTrue(isEncryptionTestEnabled);
assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
PigServer server = new PigServer(ExecType.LOCAL);
@@ -356,7 +330,6 @@ public void testReadDataFromEncryptedHiveTableByPig() throws IOException {
@Test
public void testReadDataFromEncryptedHiveTableByHCatMR() throws Exception {
- assumeTrue(isEncryptionTestEnabled);
assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
readRecords.clear();
diff --git hcatalog/pom.xml hcatalog/pom.xml
index 2e145b8..7550eeb 100644
--- hcatalog/pom.xml
+++ hcatalog/pom.xml
@@ -53,51 +53,24 @@
${mockito-all.version}
test
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.pig
+ pig
+ ${pig.version}
+ h2
+ test
+
-
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- h2
- test
-
-
-
-
-
-
diff --git hcatalog/server-extensions/pom.xml hcatalog/server-extensions/pom.xml
index 3cd2813..b808b40 100644
--- hcatalog/server-extensions/pom.xml
+++ hcatalog/server-extensions/pom.xml
@@ -56,6 +56,11 @@
jackson-mapper-asl
${jackson.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
org.apache.hive.hcatalog
@@ -103,28 +108,4 @@
test
-
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
-
-
-
diff --git hcatalog/streaming/pom.xml hcatalog/streaming/pom.xml
index ba9f731..39b3abd 100644
--- hcatalog/streaming/pom.xml
+++ hcatalog/streaming/pom.xml
@@ -32,34 +32,6 @@
../..
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- true
-
-
-
-
-
@@ -95,6 +67,18 @@
true
3.3.2
+
+ org.apache.hadoop
+ hadoop-common
+ true
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ true
+ ${hadoop.version}
+
@@ -108,7 +92,7 @@
org.apache.hadoop
hadoop-mapreduce-client-common
test
- ${hadoop-23.version}
+ ${hadoop.version}
diff --git hcatalog/webhcat/java-client/pom.xml hcatalog/webhcat/java-client/pom.xml
index b3f3122..4a8596c 100644
--- hcatalog/webhcat/java-client/pom.xml
+++ hcatalog/webhcat/java-client/pom.xml
@@ -55,6 +55,17 @@
hive-exec
${project.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
org.apache.hive
@@ -72,32 +83,4 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
-
-
-
diff --git hcatalog/webhcat/svr/pom.xml hcatalog/webhcat/svr/pom.xml
index e635cc6..67d73ae 100644
--- hcatalog/webhcat/svr/pom.xml
+++ hcatalog/webhcat/svr/pom.xml
@@ -102,6 +102,26 @@
jul-to-slf4j
${slf4j.version}
+
+ org.apache.hadoop
+ hadoop-auth
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
junit
@@ -110,46 +130,6 @@
test
-
-
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-auth
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
-
-
-
diff --git hplsql/pom.xml hplsql/pom.xml
index fc1c527..b855007 100644
--- hplsql/pom.xml
+++ hplsql/pom.xml
@@ -74,6 +74,12 @@
4.5
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
junit
junit
${junit.version}
@@ -81,31 +87,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
-
-
diff --git hwi/pom.xml hwi/pom.xml
index d0533ff..e9686c6 100644
--- hwi/pom.xml
+++ hwi/pom.xml
@@ -65,6 +65,19 @@
jetty-all-server
${jetty.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-common
+ ${hadoop.version}
+ true
+ test
+
org.apache.hive
@@ -86,49 +99,15 @@
${junit.version}
test
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+ test
+
-
-
- hadoop-1
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
- ${hadoop-23.version}
- true
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
- test
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git itests/custom-serde/pom.xml itests/custom-serde/pom.xml
index 078549d..1d3f929 100644
--- itests/custom-serde/pom.xml
+++ itests/custom-serde/pom.xml
@@ -39,31 +39,16 @@
${project.version}
true
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
-
+
diff --git itests/hcatalog-unit/pom.xml itests/hcatalog-unit/pom.xml
index eb330e1..ec48a4d 100644
--- itests/hcatalog-unit/pom.xml
+++ itests/hcatalog-unit/pom.xml
@@ -114,255 +114,150 @@
${junit.version}
test
+
+ org.apache.hadoop
+ hadoop-annotations
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-tests
+ ${hadoop.version}
+ test
+ tests
+
+
+ com.sun.jersey
+ jersey-servlet
+ ${jersey.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-hs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ tests
+ test
+
+
+ org.apache.pig
+ pig
+ ${pig.version}
+ h2
+ test
+
+
+
+ joda-time
+ joda-time
+ 2.2
+ test
+
-
-
- hadoop-1
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- test
-
-
-
- joda-time
- joda-time
- 2.2
- test
-
-
-
-
- hadoop-2
-
-
-
- org.apache.hadoop
- hadoop-annotations
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${hadoop-23.version}
- test
- tests
-
-
- com.sun.jersey
- jersey-servlet
- ${jersey.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${hadoop-23.version}
- test
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- tests
- test
-
-
- org.apache.pig
- pig
- ${pig.version}
- h2
- test
-
-
-
- joda-time
- joda-time
- 2.2
- test
-
-
-
-
-
diff --git itests/hive-jmh/pom.xml itests/hive-jmh/pom.xml
index 56f4016..1daef26 100644
--- itests/hive-jmh/pom.xml
+++ itests/hive-jmh/pom.xml
@@ -58,36 +58,18 @@
hive-exec
${project.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
-
-
-
diff --git itests/hive-minikdc/pom.xml itests/hive-minikdc/pom.xml
index 3098dac..6a20cad 100644
--- itests/hive-minikdc/pom.xml
+++ itests/hive-minikdc/pom.xml
@@ -130,143 +130,58 @@
${mockito-all.version}
test
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-minikdc
+ ${hadoop.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ test
+
+
+ com.sun.jersey
+ jersey-servlet
+ test
+
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- test
-
-
- com.sun.jersey
- jersey-servlet
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-minikdc
- ${hadoop-23.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hadoop
- hadoop-minicluster
- test
-
-
- com.sun.jersey
- jersey-servlet
- test
-
-
-
-
Windows
diff --git itests/hive-unit-hadoop2/pom.xml itests/hive-unit-hadoop2/pom.xml
index 83ef97c..006db5a 100644
--- itests/hive-unit-hadoop2/pom.xml
+++ itests/hive-unit-hadoop2/pom.xml
@@ -1,4 +1,4 @@
-
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.tez
+ tez-tests
+ ${tez.version}
+ test-jar
+
+
+ commons-logging
+ commons-logging
+ ${commons-logging.version}
+
@@ -122,6 +138,119 @@
${mockito-all.version}
test
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test-jar
+ test
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ test
+
+
+ com.sun.jersey
+ jersey-servlet
+ test
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-hs
+ ${hadoop.version}
+ test
+
+
+ org.apache.tez
+ tez-api
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-runtime-library
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-mapreduce
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-dag
+ ${tez.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-tests
+ ${hadoop.version}
+ test
+ tests
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ ${hadoop.version}
+ test
+
@@ -171,233 +300,6 @@
-
- hadoop-1
-
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 2.3.2
-
-
- **/metastore/hbase/**
-
-
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- test
-
-
- com.sun.jersey
- jersey-servlet
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test-jar
- test
-
-
- org.apache.hadoop
- hadoop-minicluster
- test
-
-
- com.sun.jersey
- jersey-servlet
- test
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${hadoop-23.version}
- test
-
-
- commons-logging
- commons-logging
- ${commons-logging.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${hadoop-23.version}
- test
- tests
-
-
- org.apache.hadoop
- hadoop-yarn-client
- ${hadoop-23.version}
- test
-
-
- org.apache.tez
- tez-tests
- ${tez.version}
- test-jar
-
-
- org.apache.tez
- tez-api
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-runtime-library
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-mapreduce
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-dag
- ${tez.version}
- test
-
-
-
diff --git itests/pom.xml itests/pom.xml
index acce713..0686f1f 100644
--- itests/pom.xml
+++ itests/pom.xml
@@ -40,17 +40,12 @@
qtest
qtest-accumulo
hive-jmh
+ hive-unit-hadoop2
+ hive-minikdc
- hadoop-2
-
- hive-unit-hadoop2
- hive-minikdc
-
-
-
spark-test
diff --git itests/qtest-accumulo/pom.xml itests/qtest-accumulo/pom.xml
index 09ae2a2..aafa034 100644
--- itests/qtest-accumulo/pom.xml
+++ itests/qtest-accumulo/pom.xml
@@ -39,6 +39,7 @@
as long as -DskipAccumuloTests is not specified -->
true
0.9.0
+ -mkdir -p
@@ -112,6 +113,12 @@
test
core
+
+
+ commons-logging
+ commons-logging
+ ${commons-logging.version}
+
junit
@@ -139,6 +146,158 @@
${javolution.version}
test
+
+ com.sun.jersey
+ jersey-servlet
+ ${jersey.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-hs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-tests
+ ${hadoop.version}
+ test
+ tests
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ ${hadoop.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ tests
+ test
+
+
+ org.apache.tez
+ tez-tests
+ ${tez.version}
+ test-jar
+
+
+ org.apache.tez
+ tez-api
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-runtime-library
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-mapreduce
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-dag
+ ${tez.version}
+ test
+
@@ -149,251 +308,6 @@
- hadoop-1
-
- ${hadoop-20S.version}
- -mkdir
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
- test
-
-
-
-
- hadoop-2
-
- ${hadoop-23.version}
- -mkdir -p
-
-
-
- com.sun.jersey
- jersey-servlet
- ${jersey.version}
- test
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- commons-logging
- commons-logging
- ${commons-logging.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${hadoop-23.version}
- test
- tests
-
-
- org.apache.hadoop
- hadoop-yarn-client
- ${hadoop-23.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- tests
- test
-
-
- org.apache.tez
- tez-tests
- ${tez.version}
- test-jar
-
-
- org.apache.tez
- tez-api
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-runtime-library
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-mapreduce
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-dag
- ${tez.version}
- test
-
-
-
-
accumulo-tests
diff --git itests/qtest-spark/pom.xml itests/qtest-spark/pom.xml
index 05a3c9f..e06871a 100644
--- itests/qtest-spark/pom.xml
+++ itests/qtest-spark/pom.xml
@@ -38,7 +38,7 @@
false
false
- ${hadoop-23.version}
+ ${hadoop.version}
-mkdir -p
${basedir}/${hive.path.to.root}/itests/qtest-spark/target/spark
@@ -156,52 +156,52 @@
org.apache.hadoop
hadoop-archives
- ${hadoop-23.version}
+ ${hadoop.version}
test
org.apache.hadoop
hadoop-common
- ${hadoop-23.version}
+ ${hadoop.version}
test
org.apache.hadoop
hadoop-common
- ${hadoop-23.version}
+ ${hadoop.version}
tests
test
org.apache.hadoop
hadoop-hdfs
- ${hadoop-23.version}
+ ${hadoop.version}
tests
test
org.apache.hadoop
hadoop-hdfs
- ${hadoop-23.version}
+ ${hadoop.version}
test
org.apache.hadoop
hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
+ ${hadoop.version}
tests
test
org.apache.hadoop
hadoop-mapreduce-client-hs
- ${hadoop-23.version}
+ ${hadoop.version}
test
org.apache.hadoop
hadoop-mapreduce-client-core
- ${hadoop-23.version}
+ ${hadoop.version}
test
@@ -212,65 +212,65 @@
org.apache.hadoop
hadoop-yarn-server-tests
- ${hadoop-23.version}
+ ${hadoop.version}
test
tests
org.apache.hadoop
hadoop-yarn-client
- ${hadoop-23.version}
+ ${hadoop.version}
test
org.apache.hbase
hbase-common
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
org.apache.hbase
hbase-common
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
tests
org.apache.hbase
hbase-hadoop-compat
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
org.apache.hbase
hbase-hadoop-compat
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
tests
org.apache.hbase
hbase-hadoop2-compat
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
org.apache.hbase
hbase-hadoop2-compat
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
tests
org.apache.hbase
hbase-server
- ${hbase.hadoop2.version}
+ ${hbase.version}
test
org.apache.hbase
hbase-server
- ${hbase.hadoop2.version}
+ ${hbase.version}
tests
test
diff --git itests/qtest/pom.xml itests/qtest/pom.xml
index d5c3009..9504813 100644
--- itests/qtest/pom.xml
+++ itests/qtest/pom.xml
@@ -36,10 +36,16 @@
false
false
+ -mkdir -p
+
+ commons-logging
+ commons-logging
+ ${commons-logging.version}
+
org.apache.hive
@@ -106,7 +112,7 @@
org.apache.hadoop
hadoop-yarn-registry
- ${hadoop-23.version}
+ ${hadoop.version}
true
@@ -117,6 +123,171 @@
${junit.version}
test
+
+ com.sun.jersey
+ jersey-servlet
+ ${jersey.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-hs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ test
+
+
+ org.apache.hive
+ hive-llap-server
+ ${project.version}
+ test
+
+
+ org.apache.hive
+ hive-llap-server
+ ${project.version}
+ test-jar
+ test
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-tests
+ ${hadoop.version}
+ test
+ tests
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ ${hadoop.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+ test
+ tests
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ test
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ tests
+ test
+
+
+ org.apache.tez
+ tez-tests
+ ${tez.version}
+ test-jar
+
+
+ org.apache.tez
+ tez-api
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-runtime-library
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-mapreduce
+ ${tez.version}
+ test
+
+
+ org.apache.tez
+ tez-dag
+ ${tez.version}
+ test
+
@@ -126,261 +297,9 @@
-
- hadoop-1
-
- ${hadoop-20S.version}
- -mkdir
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
- test
-
-
-
-
- hadoop-2
-
- ${hadoop-23.version}
- -mkdir -p
-
-
-
- com.sun.jersey
- jersey-servlet
- ${jersey.version}
- test
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- test
-
-
- org.apache.hive
- hive-llap-server
- ${project.version}
- test
-
-
- org.apache.hive
- hive-llap-server
- ${project.version}
- test-jar
- test
-
-
- commons-logging
- commons-logging
- ${commons-logging.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${hadoop-23.version}
- test
- tests
-
-
- org.apache.hadoop
- hadoop-yarn-client
- ${hadoop-23.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
- test
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- tests
- test
-
-
- org.apache.tez
- tez-tests
- ${tez.version}
- test-jar
-
-
- org.apache.tez
- tez-api
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-runtime-library
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-mapreduce
- ${tez.version}
- test
-
-
- org.apache.tez
- tez-dag
- ${tez.version}
- test
-
-
-
-
+
org.codehaus.mojo
@@ -439,7 +358,7 @@
runDisabled="${run_disabled}"
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/compiler/errors/" className="TestParseNegative"
logFile="${project.build.directory}/testparseneggen.log"
- hadoopVersion="${active.hadoop.version}"
+ hadoopVersion="${hadoop.version}"
logDirectory="${project.build.directory}/qfile-results/negative/"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
@@ -457,7 +376,7 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestCliDriver"
logFile="${project.build.directory}/testclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
- hadoopVersion="${active.hadoop.version}"
+ hadoopVersion="${hadoop.version}"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
@@ -474,7 +393,7 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientnegative/" className="TestNegativeCliDriver"
logFile="${project.build.directory}/testnegativeclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientnegative/"
- hadoopVersion="${active.hadoop.version}"
+ hadoopVersion="${hadoop.version}"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
@@ -490,7 +409,7 @@
className="TestCompareCliDriver"
logFile="${project.build.directory}/testcompareclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientcompare/"
- hadoopVersion="${active.hadoop.version}"
+ hadoopVersion="${hadoop.version}"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
@@ -507,72 +426,65 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestMinimrCliDriver"
logFile="${project.build.directory}/testminimrclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/clientpositive/"
- hadoopVersion="${active.hadoop.version}"
+ hadoopVersion="${hadoop.version}"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
-
-
-
-
+
-
+
-
+
-
-
-
-
@@ -649,7 +561,7 @@
resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" className="TestBeeLineDriver"
logFile="${project.build.directory}/testbeelinedrivergen.log"
logDirectory="${project.build.directory}/qfile-results/beelinepositive/"
- hadoopVersion="${hadoopVersion}"/>
+ hadoopVersion="${hadoop.version}"/>
@@ -666,7 +578,7 @@
resultsDirectory="${basedir}/${hive.path.to.root}/contrib/src/test/results/clientpositive/" className="TestContribCliDriver"
logFile="${project.build.directory}/testcontribclidrivergen.log"
logDirectory="${project.build.directory}/qfile-results/contribclientpositive"
- hadoopVersion="${hadoopVersion}"
+ hadoopVersion="${hadoop.version}"
initScript="${initScript}"
cleanupScript="q_test_cleanup.sql"/>
diff --git itests/test-serde/pom.xml itests/test-serde/pom.xml
index 7a1c622..81b7293 100644
--- itests/test-serde/pom.xml
+++ itests/test-serde/pom.xml
@@ -40,31 +40,13 @@
${project.version}
true
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
-
-
diff --git itests/util/pom.xml itests/util/pom.xml
index fdab72c..67e8e86 100644
--- itests/util/pom.xml
+++ itests/util/pom.xml
@@ -86,131 +86,54 @@
${project.version}
tests
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+
junit
junit
${junit.version}
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+ tests
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+ tests
+
-
-
-
- hadoop-1
-
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 2.3.2
-
-
- **/metastore/hbase/**
-
-
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
- tests
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
- tests
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
- tests
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- test-jar
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
- test-jar
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
- tests
-
-
-
-
-
diff --git jdbc/pom.xml jdbc/pom.xml
index 371d709..012908f 100644
--- jdbc/pom.xml
+++ jdbc/pom.xml
@@ -103,32 +103,16 @@
curator-framework
${curator.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
-
dist
true
@@ -136,7 +120,6 @@
-
${basedir}/src/java
diff --git llap-client/pom.xml llap-client/pom.xml
index b7b5803..ff7c82c 100644
--- llap-client/pom.xml
+++ llap-client/pom.xml
@@ -65,6 +65,18 @@
libthrift
${libthrift.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
junit
@@ -84,66 +96,28 @@
${jersey.version}
test
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git llap-server/pom.xml llap-server/pom.xml
index dd8dd7b..42e53b6 100644
--- llap-server/pom.xml
+++ llap-server/pom.xml
@@ -90,8 +90,70 @@
json
${json.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-yarn-registry
+ ${hadoop.version}
+ true
+
+
+ org.apache.tez
+ tez-runtime-internals
+ ${tez.version}
+ true
+
+
+ org.apache.tez
+ tez-runtime-library
+ ${tez.version}
+ true
+
+
+ org.apache.tez
+ tez-mapreduce
+ ${tez.version}
+ true
+
+
+ org.apache.tez
+ tez-dag
+ ${tez.version}
+ true
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
+
junit
junit
${junit.version}
@@ -113,90 +175,6 @@
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-yarn-registry
- ${hadoop-23.version}
- true
-
-
- org.apache.tez
- tez-runtime-internals
- ${tez.version}
- true
-
-
- org.apache.tez
- tez-runtime-library
- ${tez.version}
- true
-
-
- org.apache.tez
- tez-mapreduce
- ${tez.version}
- true
-
-
- org.apache.tez
- tez-dag
- ${tez.version}
- true
-
-
-
-
protobuf
diff --git metastore/pom.xml metastore/pom.xml
index f209d50..4cd1e6d 100644
--- metastore/pom.xml
+++ metastore/pom.xml
@@ -56,6 +56,11 @@
${protobuf.version}
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+
+
com.jolbox
bonecp
${bonecp.version}
@@ -117,6 +122,18 @@
${antlr.version}
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
org.apache.thrift
libfb303
${libfb303.version}
@@ -164,56 +181,6 @@
- hadoop-1
-
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 2.3.2
-
-
- **/hbase/**
-
-
- **/hbase/**
-
-
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hbase
- hbase-client
- ${hbase.hadoop2.version}
-
-
-
-
thriftif
diff --git pom.xml pom.xml
index a400f78..3b3303c 100644
--- pom.xml
+++ pom.xml
@@ -123,11 +123,9 @@
3.1.0
14.0.1
2.4.4
- 1.2.1
- 2.6.0
+ 2.6.0
${basedir}/${hive.path.to.root}/testutils/hadoop
- 0.98.9-hadoop1
- 1.1.1
+ 1.1.1
4.4
4.4
@@ -236,7 +234,6 @@
-
@@ -599,6 +596,87 @@
xercesImpl
${xerces.version}
+
+ org.apache.hadoop
+ hadoop-client
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.httpcomponents
+ httpcore
+
+
+ org.apache.httpcomponents
+ httpclient
+
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-minikdc
+ ${hadoop.version}
+
+
+ org.apache.hbase
+ hbase-common
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-hadoop2-compat
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ ${hadoop.version}
+
+
+ org.scala-lang
+ scala-library
+ ${scala.version}
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+ ${spark.version}
+
+
+ org.apache.hadoop
+ hadoop-core
+
+
+
@@ -1061,146 +1139,6 @@
-
-
-
- hadoop-1
-
-
-
- org.apache.hadoop
- hadoop-client
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-hadoop1-compat
- ${hbase.hadoop1.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop1.version}
-
-
-
-
-
- hadoop-2
-
- llap-server
-
-
-
-
- org.apache.hadoop
- hadoop-client
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
-
-
- org.apache.httpcomponents
- httpcore
-
-
- org.apache.httpcomponents
- httpclient
-
-
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
-
-
- org.apache.hadoop
- hadoop-minikdc
- ${hadoop-23.version}
-
-
- org.apache.hbase
- hbase-common
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-hadoop-compat
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-hadoop2-compat
- ${hbase.hadoop2.version}
-
-
- org.apache.hbase
- hbase-server
- ${hbase.hadoop2.version}
-
-
- org.apache.hadoop
- hadoop-minicluster
- ${hadoop-23.version}
-
-
- org.scala-lang
- scala-library
- ${scala.version}
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
- ${spark.version}
-
-
- org.apache.hadoop
- hadoop-core
-
-
-
-
-
-
windows-test
diff --git ql/pom.xml ql/pom.xml
index 83b9ebf..8ac13a6 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -174,6 +174,74 @@
${libfb303.version}
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ javax.servlet
+ servlet-api
+
+
+ true
+
+
+ org.apache.hadoop
+ hadoop-archives
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-common
+ ${hadoop.version}
+ true
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+ javax.servlet
+ servlet-api
+
+
+ true
+
+
+ org.apache.hadoop
+ hadoop-yarn-api
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-yarn-common
+ ${hadoop.version}
+
+
+ javax.servlet
+ servlet-api
+
+
+ true
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ ${hadoop.version}
+ true
+
+
+
org.apache.ivy
ivy
${ivy.version}
@@ -494,91 +562,6 @@
- hadoop-1
-
-
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 2.3.2
-
-
- **/ATSHook.java
-
-
-
-
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-archives
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
- ${hadoop-23.version}
- true
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-yarn-api
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-yarn-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-yarn-client
- ${hadoop-23.version}
- true
-
-
-
-
protobuf
@@ -722,7 +705,6 @@
org.json:json
org.apache.avro:avro
org.apache.avro:avro-mapred
- org.apache.hive.shims:hive-shims-0.20S
org.apache.hive.shims:hive-shims-0.23
org.apache.hive.shims:hive-shims-0.23
org.apache.hive.shims:hive-shims-common
diff --git serde/pom.xml serde/pom.xml
index b6c0d0c..99c89ed 100644
--- serde/pom.xml
+++ serde/pom.xml
@@ -85,6 +85,18 @@
parquet-hadoop-bundle
${parquet.version}
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
@@ -111,66 +123,28 @@
${jersey.version}
test
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ tests
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ test
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+ tests
+ test
+
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- test
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- tests
- test
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-23.version}
- tests
- test
-
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git service/pom.xml service/pom.xml
index 07eeb9a..d7ab5bf 100644
--- service/pom.xml
+++ service/pom.xml
@@ -96,7 +96,19 @@
curator-recipes
${curator.version}
-
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+ true
+
+
org.apache.hive
hive-exec
@@ -113,37 +125,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${hadoop-23.version}
- true
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test
diff --git shims/0.20S/pom.xml shims/0.20S/pom.xml
deleted file mode 100644
index 565dd5e..0000000
--- shims/0.20S/pom.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-
-
-
- 4.0.0
-
- org.apache.hive
- hive
- 2.0.0-SNAPSHOT
- ../../pom.xml
-
-
- org.apache.hive.shims
- hive-shims-0.20S
- jar
- Hive Shims 0.20S
-
-
- ../..
-
-
-
-
-
-
- org.apache.hive.shims
- hive-shims-common
- ${project.version}
-
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-test
- ${hadoop-20S.version}
- true
-
-
- org.apache.hadoop
- hadoop-tools
- ${hadoop-20S.version}
- provided
-
-
-
diff --git shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
deleted file mode 100644
index f60e8f0..0000000
--- shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ /dev/null
@@ -1,734 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.fs.ProxyFileSystem;
-import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobInProgress;
-import org.apache.hadoop.mapred.JobTracker;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TaskLogServlet;
-import org.apache.hadoop.mapred.WebHCatJTShim20S;
-import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.KerberosName;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.tools.distcp2.DistCp;
-import org.apache.hadoop.tools.distcp2.DistCpOptions;
-import org.apache.hadoop.tools.distcp2.DistCpOptions.FileAttribute;
-
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.VersionInfo;
-
-
-/**
- * Implemention of shims against Hadoop 0.20 with Security.
- */
-public class Hadoop20SShims extends HadoopShimsSecure {
-
- @Override
- public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
- return new CombineFileInputFormatShim() {
- @Override
- public RecordReader getRecordReader(InputSplit split,
- JobConf job, Reporter reporter) throws IOException {
- throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
- }
-
- @Override
- protected FileStatus[] listStatus(JobConf job) throws IOException {
- FileStatus[] result = super.listStatus(job);
- boolean foundDir = false;
- for (FileStatus stat: result) {
- if (stat.isDir()) {
- foundDir = true;
- break;
- }
- }
- if (!foundDir) {
- return result;
- }
- ArrayList files = new ArrayList();
- for (FileStatus stat: result) {
- if (!stat.isDir()) {
- files.add(stat);
- }
- }
- return files.toArray(new FileStatus[files.size()]);
- }
- };
- }
-
- @Override
- public String getTaskAttemptLogUrl(JobConf conf,
- String taskTrackerHttpAddress, String taskAttemptId)
- throws MalformedURLException {
- URL taskTrackerHttpURL = new URL(taskTrackerHttpAddress);
- return TaskLogServlet.getTaskLogUrl(
- taskTrackerHttpURL.getHost(),
- Integer.toString(taskTrackerHttpURL.getPort()),
- taskAttemptId);
- }
-
- @Override
- public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
- switch (clusterStatus.getJobTrackerState()) {
- case INITIALIZING:
- return JobTrackerState.INITIALIZING;
- case RUNNING:
- return JobTrackerState.RUNNING;
- default:
- String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerState();
- throw new Exception(errorMsg);
- }
- }
-
- @Override
- public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
- return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {
- @Override
- public void progress() {
- progressable.progress();
- }
- };
- }
-
- @Override
- public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
- return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap, taskId, id);
- }
-
- @Override
- public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
- return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(), job.getJobID());
- }
-
- @Override
- public boolean isLocalMode(Configuration conf) {
- return "local".equals(getJobLauncherRpcAddress(conf));
- }
-
- @Override
- public String getJobLauncherRpcAddress(Configuration conf) {
- return conf.get("mapred.job.tracker");
- }
-
- @Override
- public void setJobLauncherRpcAddress(Configuration conf, String val) {
- conf.set("mapred.job.tracker", val);
- }
-
- @Override
- public String getJobLauncherHttpAddress(Configuration conf) {
- return conf.get("mapred.job.tracker.http.address");
- }
-
- @Override
- public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
- throws IOException {
- // older versions of Hadoop don't have a Trash constructor based on the
- // Path or FileSystem. So need to achieve this by creating a dummy conf.
- // this needs to be filtered out based on version
-
- Configuration dupConf = new Configuration(conf);
- FileSystem.setDefaultUri(dupConf, fs.getUri());
- Trash trash = new Trash(dupConf);
- return trash.moveToTrash(path);
- }
- @Override
- public long getDefaultBlockSize(FileSystem fs, Path path) {
- return fs.getDefaultBlockSize();
- }
-
- @Override
- public short getDefaultReplication(FileSystem fs, Path path) {
- return fs.getDefaultReplication();
- }
-
- @Override
- public void refreshDefaultQueue(Configuration conf, String userName) {
- // MR1 does not expose API required to set MR queue mapping for user
- }
-
- @Override
- public void setTotalOrderPartitionFile(JobConf jobConf, Path partitionFile){
- TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
- }
-
- @Override
- public Comparator getLongComparator() {
- return new Comparator() {
- @Override
- public int compare(LongWritable o1, LongWritable o2) {
- return o1.compareTo(o2);
- }
- };
- }
-
- /**
- * Returns a shim to wrap MiniMrCluster
- */
- @Override
- public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
- String nameNode, int numDir) throws IOException {
- return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
- }
-
- @Override
- public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers,
- String nameNode, boolean isLlap) throws IOException {
- throw new IOException("Cannot run tez on current hadoop, Version: " + VersionInfo.getVersion());
- }
-
- @Override
- public MiniMrShim getMiniSparkCluster(Configuration conf, int numberOfTaskTrackers,
- String nameNode, int numDir) throws IOException {
- throw new IOException("Cannot run Spark on YARN on current Hadoop, Version: " + VersionInfo.getVersion());
- }
-
- /**
- * Shim for MiniMrCluster
- */
- public class MiniMrShim implements HadoopShims.MiniMrShim {
-
- private final MiniMRCluster mr;
-
- public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
- String nameNode, int numDir) throws IOException {
- this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
- }
-
- @Override
- public int getJobTrackerPort() throws UnsupportedOperationException {
- return mr.getJobTrackerPort();
- }
-
- @Override
- public void shutdown() throws IOException {
- MiniMRCluster.JobTrackerRunner runner = mr.getJobTrackerRunner();
- JobTracker tracker = runner.getJobTracker();
- if (tracker != null) {
- for (JobInProgress running : tracker.getRunningJobs()) {
- try {
- running.kill();
- } catch (Exception e) {
- // ignore
- }
- }
- }
- runner.shutdown();
- }
-
- @Override
- public void setupConfiguration(Configuration conf) {
- setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
- }
- }
-
- // Don't move this code to the parent class. There's a binary
- // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
- // need to have two different shim classes even though they are
- // exactly the same.
- @Override
- public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
- int numDataNodes,
- boolean format,
- String[] racks) throws IOException {
- return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
- }
-
- /**
- * MiniDFSShim.
- *
- */
- public class MiniDFSShim implements HadoopShims.MiniDFSShim {
- private final MiniDFSCluster cluster;
-
- public MiniDFSShim(MiniDFSCluster cluster) {
- this.cluster = cluster;
- }
-
- @Override
- public FileSystem getFileSystem() throws IOException {
- return cluster.getFileSystem();
- }
-
- @Override
- public void shutdown() {
- cluster.shutdown();
- }
- }
- private volatile HCatHadoopShims hcatShimInstance;
- @Override
- public HCatHadoopShims getHCatShim() {
- if(hcatShimInstance == null) {
- hcatShimInstance = new HCatHadoopShims20S();
- }
- return hcatShimInstance;
- }
- private final class HCatHadoopShims20S implements HCatHadoopShims {
- @Override
- public TaskID createTaskID() {
- return new TaskID();
- }
-
- @Override
- public TaskAttemptID createTaskAttemptID() {
- return new TaskAttemptID();
- }
-
- @Override
- public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
- return new TaskAttemptContext(conf, taskId);
- }
-
- @Override
- public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
- org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
- org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
- try {
- java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContext.class.getDeclaredConstructor(
- org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
- Progressable.class);
- construct.setAccessible(true);
- newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, progressable);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- return newContext;
- }
-
- @Override
- public JobContext createJobContext(Configuration conf,
- JobID jobId) {
- return new JobContext(conf, jobId);
- }
-
- @Override
- public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
- org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
- org.apache.hadoop.mapred.JobContext newContext = null;
- try {
- java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.JobContext.class.getDeclaredConstructor(
- org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapreduce.JobID.class,
- Progressable.class);
- construct.setAccessible(true);
- newContext = (org.apache.hadoop.mapred.JobContext)construct.newInstance(conf, jobId, progressable);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- return newContext;
- }
-
- @Override
- public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
- if( job.getConfiguration().get("mapred.job.tracker", "").equalsIgnoreCase("local") ) {
- try {
- //In local mode, mapreduce will not call OutputCommitter.cleanupJob.
- //Calling it from here so that the partition publish happens.
- //This call needs to be removed after MAPREDUCE-1447 is fixed.
- outputFormat.getOutputCommitter(createTaskAttemptContext(
- job.getConfiguration(), createTaskAttemptID())).commitJob(job);
- } catch (IOException e) {
- throw new IOException("Failed to cleanup job",e);
- } catch (InterruptedException e) {
- throw new IOException("Failed to cleanup job",e);
- }
- }
- }
-
- @Override
- public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
- if (job.getConfiguration().get("mapred.job.tracker", "")
- .equalsIgnoreCase("local")) {
- try {
- // This call needs to be removed after MAPREDUCE-1447 is fixed.
- outputFormat.getOutputCommitter(createTaskAttemptContext(
- job.getConfiguration(), new TaskAttemptID())).abortJob(job, JobStatus.State.FAILED);
- } catch (IOException e) {
- throw new IOException("Failed to abort job", e);
- } catch (InterruptedException e) {
- throw new IOException("Failed to abort job", e);
- }
- }
- }
-
- @Override
- public InetSocketAddress getResourceManagerAddress(Configuration conf)
- {
- return JobTracker.getAddress(conf);
- }
-
- @Override
- public String getPropertyName(PropertyName name) {
- switch (name) {
- case CACHE_ARCHIVES:
- return DistributedCache.CACHE_ARCHIVES;
- case CACHE_FILES:
- return DistributedCache.CACHE_FILES;
- case CACHE_SYMLINK:
- return DistributedCache.CACHE_SYMLINK;
- case CLASSPATH_ARCHIVES:
- return "mapred.job.classpath.archives";
- case CLASSPATH_FILES:
- return "mapred.job.classpath.files";
- }
-
- return "";
- }
-
- @Override
- public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
- // In hadoop 1.x.x the file system URI is sufficient to determine the uri of the file
- return "hdfs".equals(fs.getUri().getScheme());
- }
- }
- @Override
- public WebHCatJTShim getWebHCatShim(Configuration conf, UserGroupInformation ugi) throws IOException {
- return new WebHCatJTShim20S(conf, ugi);//this has state, so can't be cached
- }
-
- @Override
- public List listLocatedStatus(final FileSystem fs,
- final Path path,
- final PathFilter filter
- ) throws IOException {
- return Arrays.asList(fs.listStatus(path, filter));
- }
-
- @Override
- public BlockLocation[] getLocations(FileSystem fs,
- FileStatus status) throws IOException {
- return fs.getFileBlockLocations(status, 0, status.getLen());
- }
-
- @Override
- public TreeMap getLocationsWithOffset(FileSystem fs,
- FileStatus status) throws IOException {
- TreeMap offsetBlockMap = new TreeMap();
- BlockLocation[] locations = getLocations(fs, status);
- for (BlockLocation location : locations) {
- offsetBlockMap.put(location.getOffset(), location);
- }
- return offsetBlockMap;
- }
-
- @Override
- public void hflush(FSDataOutputStream stream) throws IOException {
- stream.sync();
- }
-
- @Override
- public HdfsFileStatus getFullFileStatus(Configuration conf, FileSystem fs, Path file)
- throws IOException {
- return new Hadoop20SFileStatus(fs.getFileStatus(file));
- }
-
- @Override
- public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus,
- FileSystem fs, Path target) throws IOException {
- String group = sourceStatus.getFileStatus().getGroup();
- String permission = Integer.toString(sourceStatus.getFileStatus().getPermission().toShort(), 8);
- //use FsShell to change group and permissions recursively
- try {
- FsShell fshell = new FsShell();
- fshell.setConf(conf);
- run(fshell, new String[]{"-chgrp", "-R", group, target.toString()});
- run(fshell, new String[]{"-chmod", "-R", permission, target.toString()});
- } catch (Exception e) {
- throw new IOException("Unable to set permissions of " + target, e);
- }
- try {
- if (LOG.isDebugEnabled()) { //some trace logging
- getFullFileStatus(conf, fs, target).debugLog();
- }
- } catch (Exception e) {
- //ignore.
- }
- }
-
- public class Hadoop20SFileStatus implements HdfsFileStatus {
- private final FileStatus fileStatus;
- public Hadoop20SFileStatus(FileStatus fileStatus) {
- this.fileStatus = fileStatus;
- }
- @Override
- public FileStatus getFileStatus() {
- return fileStatus;
- }
- @Override
- public void debugLog() {
- if (fileStatus != null) {
- LOG.debug(fileStatus.toString());
- }
- }
- }
-
- @Override
- public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
- return new ProxyFileSystem(fs, uri);
- }
- @Override
- public Map getHadoopConfNames() {
- Map ret = new HashMap();
- ret.put("HADOOPFS", "fs.default.name");
- ret.put("HADOOPMAPFILENAME", "map.input.file");
- ret.put("HADOOPMAPREDINPUTDIR", "mapred.input.dir");
- ret.put("HADOOPMAPREDINPUTDIRRECURSIVE", "mapred.input.dir.recursive");
- ret.put("MAPREDMAXSPLITSIZE", "mapred.max.split.size");
- ret.put("MAPREDMINSPLITSIZE", "mapred.min.split.size");
- ret.put("MAPREDMINSPLITSIZEPERNODE", "mapred.min.split.size.per.node");
- ret.put("MAPREDMINSPLITSIZEPERRACK", "mapred.min.split.size.per.rack");
- ret.put("HADOOPNUMREDUCERS", "mapred.reduce.tasks");
- ret.put("HADOOPJOBNAME", "mapred.job.name");
- ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
- ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
- ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
- return ret;
- }
-
- @Override
- public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in, ByteBufferPoolShim pool) throws IOException {
- /* not supported */
- return null;
- }
-
- @Override
- public DirectDecompressorShim getDirectDecompressor(DirectCompressionType codec) {
- /* not supported */
- return null;
- }
-
- @Override
- public Configuration getConfiguration(org.apache.hadoop.mapreduce.JobContext context) {
- return context.getConfiguration();
- }
-
- @Override
- public JobConf getJobConf(org.apache.hadoop.mapred.JobContext context) {
- return context.getJobConf();
- }
-
- @Override
- public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException {
- boolean origDisableHDFSCache =
- conf.getBoolean("fs." + uri.getScheme() + ".impl.disable.cache", false);
- // hadoop-1 compatible flag.
- conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", true);
- FileSystem fs = FileSystem.get(uri, conf);
- conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", origDisableHDFSCache);
- return fs;
- }
-
- @Override
- public void getMergedCredentials(JobConf jobConf) throws IOException {
- throw new IOException("Merging of credentials not supported in this version of hadoop");
- }
-
- @Override
- public void mergeCredentials(JobConf dest, JobConf src) throws IOException {
- throw new IOException("Merging of credentials not supported in this version of hadoop");
- }
-
- @Override
- public String getPassword(Configuration conf, String name) {
- // No password API, just retrieve value from conf
- return conf.get(name);
- }
-
- @Override
- public boolean supportStickyBit() {
- return false;
- }
-
- @Override
- public boolean hasStickyBit(FsPermission permission) {
- return false;
- }
-
- @Override
- public boolean supportTrashFeature() {
- return false;
- }
-
- @Override
- public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
- return null;
- }
-
- @Override
- public boolean isDirectory(FileStatus fileStatus) {
- return fileStatus.isDir();
- }
-
- /**
- * Returns a shim to wrap KerberosName
- */
- @Override
- public KerberosNameShim getKerberosNameShim(String name) throws IOException {
- return new KerberosNameShim(name);
- }
-
- /**
- * Shim for KerberosName
- */
- public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
-
- private final KerberosName kerberosName;
-
- public KerberosNameShim(String name) {
- kerberosName = new KerberosName(name);
- }
-
- @Override
- public String getDefaultRealm() {
- return kerberosName.getDefaultRealm();
- }
-
- @Override
- public String getServiceName() {
- return kerberosName.getServiceName();
- }
-
- @Override
- public String getHostName() {
- return kerberosName.getHostName();
- }
-
- @Override
- public String getRealm() {
- return kerberosName.getRealm();
- }
-
- @Override
- public String getShortName() throws IOException {
- return kerberosName.getShortName();
- }
- }
-
- @Override
- public StoragePolicyShim getStoragePolicyShim(FileSystem fs) {
- return null;
- }
-
- @Override
- public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
-
- DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
- options.setSyncFolder(true);
- options.setSkipCRC(true);
- options.preserve(FileAttribute.BLOCKSIZE);
- try {
- DistCp distcp = new DistCp(conf, options);
- distcp.execute();
- return true;
- } catch (Exception e) {
- throw new IOException("Cannot execute DistCp process: " + e, e);
- }
- }
-
- @Override
- public HdfsEncryptionShim createHdfsEncryptionShim(FileSystem fs, Configuration conf) throws IOException {
- return new HadoopShims.NoopHdfsEncryptionShim();
- }
-
- @Override
- public Path getPathWithoutSchemeAndAuthority(Path path) {
- return path;
- }
-
- @Override
- public List listLocatedHdfsStatus(
- FileSystem fs, Path path, PathFilter filter) throws IOException {
- throw new UnsupportedOperationException("Not supported on old version");
- }
-
- @Override
- public int readByteBuffer(FSDataInputStream file, ByteBuffer dest) throws IOException {
- // Inefficient for direct buffers; only here for compat.
- int pos = dest.position();
- if (dest.hasArray()) {
- int result = file.read(dest.array(), dest.arrayOffset(), dest.remaining());
- if (result > 0) {
- dest.position(pos + result);
- }
- return result;
- } else {
- byte[] arr = new byte[dest.remaining()];
- int result = file.read(arr, 0, arr.length);
- if (result > 0) {
- dest.put(arr, 0, result);
- dest.position(pos + result);
- }
- return result;
- }
- }
-
- @Override
- public void addDelegationTokens(FileSystem fs, Credentials cred, String uname) throws IOException {
- Token> fsToken = fs.getDelegationToken(uname);
- cred.addToken(fsToken.getService(), fsToken);
- }
-
- @Override
- public long getFileId(FileSystem fs, String path) throws IOException {
- throw new UnsupportedOperationException("Not supported on old version");
- }
-}
diff --git shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
deleted file mode 100644
index 75659ff..0000000
--- shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-
-import org.mortbay.jetty.bio.SocketConnector;
-import org.mortbay.jetty.handler.RequestLogHandler;
-import org.mortbay.jetty.webapp.WebAppContext;
-
-import java.io.IOException;
-
-public class Jetty20SShims implements JettyShims {
- public Server startServer(String listen, int port) throws IOException {
- Server s = new Server();
- s.setupListenerHostPort(listen, port);
- return s;
- }
-
- private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
- public void addWar(String war, String contextPath) {
- WebAppContext wac = new WebAppContext();
- wac.setContextPath(contextPath);
- wac.setWar(war);
- RequestLogHandler rlh = new RequestLogHandler();
- rlh.setHandler(wac);
- this.addHandler(rlh);
- }
-
- public void setupListenerHostPort(String listen, int port)
- throws IOException {
-
- SocketConnector connector = new SocketConnector();
- connector.setPort(port);
- connector.setHost(listen);
- this.addConnector(connector);
- }
- }
-}
diff --git shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
deleted file mode 100644
index 367ea60..0000000
--- shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * This is in org.apache.hadoop.mapred package because it relies on
- * JobSubmissionProtocol which is package private
- */
-public class WebHCatJTShim20S implements WebHCatJTShim {
- private JobSubmissionProtocol cnx;
-
- /**
- * Create a connection to the Job Tracker.
- */
- public WebHCatJTShim20S(Configuration conf, UserGroupInformation ugi)
- throws IOException {
- cnx = (JobSubmissionProtocol)
- RPC.getProxy(JobSubmissionProtocol.class,
- JobSubmissionProtocol.versionID,
- getAddress(conf),
- ugi,
- conf,
- NetUtils.getSocketFactory(conf,
- JobSubmissionProtocol.class));
- }
-
- /**
- * Grab a handle to a job that is already known to the JobTracker.
- *
- * @return Profile of the job, or null if not found.
- */
- public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid)
- throws IOException {
- return cnx.getJobProfile(jobid);
- }
-
- /**
- * Grab a handle to a job that is already known to the JobTracker.
- *
- * @return Status of the job, or null if not found.
- */
- public org.apache.hadoop.mapred.JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid)
- throws IOException {
- return cnx.getJobStatus(jobid);
- }
-
-
- /**
- * Kill a job.
- */
- public void killJob(org.apache.hadoop.mapred.JobID jobid)
- throws IOException {
- cnx.killJob(jobid);
- }
-
- /**
- * Get all the jobs submitted.
- */
- public org.apache.hadoop.mapred.JobStatus[] getAllJobs()
- throws IOException {
- return cnx.getAllJobs();
- }
-
- /**
- * Close the connection to the Job Tracker.
- */
- public void close() {
- RPC.stopProxy(cnx);
- }
- private InetSocketAddress getAddress(Configuration conf) {
- String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
- return NetUtils.createSocketAddr(jobTrackerStr);
- }
- @Override
- public void addCacheFile(URI uri, Job job) {
- DistributedCache.addCacheFile(uri, job.getConfiguration());
- }
- /**
- * Kill jobs is only supported on hadoop 2.0+.
- */
- @Override
- public void killJobs(String tag, long timestamp) {
- return;
- }
- /**
- * Get jobs is only supported on hadoop 2.0+.
- */
- @Override
- public Set getJobs(String tag, long timestamp)
- {
- return new HashSet();
- }
-}
-
diff --git shims/0.23/pom.xml shims/0.23/pom.xml
index 3b1fb97..eee594e 100644
--- shims/0.23/pom.xml
+++ shims/0.23/pom.xml
@@ -54,31 +54,32 @@
org.apache.hadoop
hadoop-common
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-hdfs
- ${hadoop-23.version}
+ ${hadoop.version}
+ true
org.apache.hadoop
hadoop-hdfs
- ${hadoop-23.version}
+ ${hadoop.version}
test-jar
true
org.apache.hadoop
hadoop-mapreduce-client-core
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-mapreduce-client-jobclient
- ${hadoop-23.version}
+ ${hadoop.version}
test-jar
true
@@ -103,25 +104,25 @@
org.apache.hadoop
hadoop-yarn-api
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-common
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-client
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-server-resourcemanager
- ${hadoop-23.version}
+ ${hadoop.version}
javax.servlet
@@ -139,15 +140,16 @@
org.apache.hadoop
hadoop-yarn-server-tests
- ${hadoop-23.version}
+ ${hadoop.version}
true
test-jar
org.apache.hadoop
hadoop-distcp
- ${hadoop-23.version}
+ ${hadoop.version}
provided
-
+
+
diff --git shims/aggregator/pom.xml shims/aggregator/pom.xml
index 07f6d1b..d8c39a2 100644
--- shims/aggregator/pom.xml
+++ shims/aggregator/pom.xml
@@ -41,12 +41,6 @@
org.apache.hive.shims
- hive-shims-0.20S
- ${project.version}
- runtime
-
-
- org.apache.hive.shims
hive-shims-0.23
${project.version}
runtime
diff --git shims/common/pom.xml shims/common/pom.xml
index dfdec2b..76d8da5 100644
--- shims/common/pom.xml
+++ shims/common/pom.xml
@@ -62,8 +62,8 @@
org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
+ hadoop-client
+ ${hadoop.version}
true
diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index c7fa11b..0fe3169 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -17,19 +17,18 @@
*/
package org.apache.hadoop.hive.shims;
-import java.util.HashMap;
-import java.util.Map;
-
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.util.VersionInfo;
import org.apache.log4j.AppenderSkeleton;
+import java.util.HashMap;
+import java.util.Map;
+
/**
* ShimLoader.
*
*/
public abstract class ShimLoader {
- public static String HADOOP20SVERSIONNAME = "0.20S";
public static String HADOOP23VERSIONNAME = "0.23";
private static HadoopShims hadoopShims;
@@ -45,7 +44,6 @@
new HashMap();
static {
- HADOOP_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop20SShims");
HADOOP_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop23Shims");
}
@@ -57,7 +55,6 @@
new HashMap();
static {
- JETTY_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Jetty20SShims");
JETTY_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Jetty23Shims");
}
@@ -68,21 +65,17 @@
new HashMap();
static {
- EVENT_COUNTER_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.log.metrics" +
- ".EventCounter");
EVENT_COUNTER_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.log.metrics" +
".EventCounter");
}
/**
- * The names of the classes for shimming {@link HadoopThriftAuthBridge}
+ * The names of the classes for shimming HadoopThriftAuthBridge
*/
private static final HashMap HADOOP_THRIFT_AUTH_BRIDGE_CLASSES =
new HashMap();
static {
- HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP20SVERSIONNAME,
- "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge");
HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP23VERSIONNAME,
"org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23");
}
@@ -166,8 +159,6 @@ public static String getMajorVersion() {
}
switch (Integer.parseInt(parts[0])) {
- case 1:
- return HADOOP20SVERSIONNAME;
case 2:
return HADOOP23VERSIONNAME;
default:
diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 7ed7265..6b0bd10 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -71,8 +71,11 @@
/**
* Functions that bridge Thrift's SASL transports to Hadoop's
* SASL callback handlers and authentication classes.
+ * HIVE-11378: This class is no longer used directly. It now exists only as a shell to be
+ * extended by HadoopThriftAuthBridge23 in the 0.23 shims. It has been made abstract
+ * to avoid maintenance errors.
*/
-public class HadoopThriftAuthBridge {
+public abstract class HadoopThriftAuthBridge {
private static final Log LOG = LogFactory.getLog(HadoopThriftAuthBridge.class);
public Client createClient() {
@@ -164,11 +167,7 @@ private boolean loginUserHasCurrentAuthMethod(UserGroupInformation ugi, String s
* @return Hadoop SASL configuration
*/
- public Map getHadoopSaslProperties(Configuration conf) {
- // Initialize the SaslRpcServer to ensure QOP parameters are read from conf
- SaslRpcServer.init(conf);
- return SaslRpcServer.SASL_PROPS;
- }
+ public abstract Map getHadoopSaslProperties(Configuration conf);
public static class Client {
/**
diff --git shims/pom.xml shims/pom.xml
index 12113d5..ffacf75 100644
--- shims/pom.xml
+++ shims/pom.xml
@@ -33,7 +33,6 @@
common
- 0.20S
0.23
scheduler
aggregator
diff --git shims/scheduler/pom.xml shims/scheduler/pom.xml
index 407d57d..276b6cb 100644
--- shims/scheduler/pom.xml
+++ shims/scheduler/pom.xml
@@ -49,43 +49,43 @@
org.apache.hadoop
hadoop-common
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-mapreduce-client-core
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-api
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-common
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-client
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-server-resourcemanager
- ${hadoop-23.version}
+ ${hadoop.version}
true
org.apache.hadoop
hadoop-yarn-server-tests
- ${hadoop-23.version}
+ ${hadoop.version}
true
test-jar
diff --git storage-api/pom.xml storage-api/pom.xml
index 71b79f1..0af0d27 100644
--- storage-api/pom.xml
+++ storage-api/pom.xml
@@ -34,6 +34,12 @@
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ true
+
+
junit
junit
${junit.version}
@@ -41,31 +47,6 @@
-
-
- hadoop-1
-
-
- org.apache.hadoop
- hadoop-core
- ${hadoop-20S.version}
- true
-
-
-
-
- hadoop-2
-
-
- org.apache.hadoop
- hadoop-common
- ${hadoop-23.version}
- true
-
-
-
-
-
${basedir}/src/java
${basedir}/src/test