From 0865e747b07b8faa3b7945aa8b97140b7752577c Mon Sep 17 00:00:00 2001
From: Duo Zhang
Date: Fri, 17 May 2019 14:34:11 +0800
Subject: [PATCH] HBASE-22413 Backport 'HBASE-22399 Change default hadoop-two.version to 2.8.x and remove the 2.7.x hadoop checks' to branch-1

---
 dev-support/hbase-personality.sh              | 20 +++++++++++++-------
 .../client/TestReversedScannerCallable.java   |  4 +---
 .../hadoop/hbase/fs/TestBlockReorder.java     | 17 ++++-------------
 pom.xml                                       | 14 ++++++++++++--
 4 files changed, 30 insertions(+), 25 deletions(-)

diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 7f930d0dc4..4124b02237 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -488,27 +488,33 @@ function hadoopcheck_rebuild

   # All supported Hadoop versions that we want to test the compilation with
   # See the Hadoop section on prereqs in the HBase Reference Guide
-  hbase_common_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4"
   if [[ "${PATCH_BRANCH}" = branch-1.* ]] && [[ "${PATCH_BRANCH#branch-1.}" -lt "5" ]]; then
     yetus_info "Setting Hadoop 2 versions to test based on before-branch-1.5 rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.4"
+      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.7"
     else
-      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
+      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
     fi
   elif [[ "${PATCH_BRANCH}" = branch-2.0 ]]; then
     yetus_info "Setting Hadoop 2 versions to test based on branch-2.0 rules."
     if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
-      hbase_hadoop2_versions="2.6.5 2.7.4"
+      hbase_hadoop2_versions="2.6.5 2.7.7 2.8.5"
     else
-      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 ${hbase_common_hadoop2_versions}"
+      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
+    fi
+  elif [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
+    yetus_info "Setting Hadoop 2 versions to test based on branch-2.1 rules."
+    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
+      hbase_hadoop2_versions="2.7.7 2.8.5"
+    else
+      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
     fi
   else
     yetus_info "Setting Hadoop 2 versions to test based on branch-1.5+/branch-2.1+/master/feature branch rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then - hbase_hadoop2_versions="2.7.4" + hbase_hadoop2_versions="2.8.5 2.9.2" else - hbase_hadoop2_versions="${hbase_common_hadoop2_versions}" + hbase_hadoop2_versions="2.8.2 2.8.3 2.8.4 2.8.5 2.9.1 2.9.2" fi fi hbase_hadoop3_versions="3.0.0" diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestReversedScannerCallable.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestReversedScannerCallable.java index 6c2d0a6b23..23239cfe8d 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestReversedScannerCallable.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestReversedScannerCallable.java @@ -53,13 +53,11 @@ public class TestReversedScannerCallable { @Before public void setUp() throws Exception { byte[] ROW_BEFORE = ConnectionUtils.createCloseRowBefore(ROW); - - Configuration conf = Mockito.mock(Configuration.class); HRegionLocation regionLocation = Mockito.mock(HRegionLocation.class); ServerName serverName = Mockito.mock(ServerName.class); HRegionInfo regionInfo = Mockito.mock(HRegionInfo.class); - Mockito.when(connection.getConfiguration()).thenReturn(conf); + Mockito.when(connection.getConfiguration()).thenReturn(new Configuration()); Mockito.when(regionLocations.size()).thenReturn(1); Mockito.when(regionLocations.getRegionLocation(0)).thenReturn(regionLocation); Mockito.when(regionLocation.getHostname()).thenReturn("localhost"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java index d114e354c7..c02f4fdf57 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java @@ -15,7 +15,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
  */
-
 package org.apache.hadoop.hbase.fs;

 import java.io.FileNotFoundException;
@@ -27,10 +26,8 @@ import java.net.BindException;
 import java.net.ServerSocket;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -38,12 +35,8 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
@@ -52,6 +45,9 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.wal.DefaultWALProvider;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -63,7 +59,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.log4j.Level;
+import org.apache.hadoop.ipc.RemoteException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -77,11 +73,6 @@ import org.junit.experimental.categories.Category;
 public class TestBlockReorder {
   private static final Log LOG = LogFactory.getLog(TestBlockReorder.class);

-  static {
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) HFileSystem.LOG).getLogger().setLevel(Level.ALL);
-  }
-
   private Configuration conf;
   private MiniDFSCluster cluster;
   private HBaseTestingUtility htu;
diff --git a/pom.xml b/pom.xml
index 1f3db19aa4..3fa579c495 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1249,7 +1249,7 @@
     <maven.min.version>3.0.3</maven.min.version>
     <java.min.version>${compileSource}</java.min.version>
-    <hadoop-two.version>2.7.4</hadoop-two.version>
+    <hadoop-two.version>2.8.5</hadoop-two.version>
     <hadoop.version>${hadoop-two.version}</hadoop.version>
     <hadoop-three.version>3.0.0</hadoop-three.version>
@@ -2273,7 +2273,17 @@
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-auth</artifactId>
-        <version>${hadoop-two.version}</version>
+        <version>${hadoop-two.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>net.minidev</groupId>
+            <artifactId>json-smart</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
-- 
2.17.1