diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 61d7c9baaa8..121b3ab98e6 100755 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -90,6 +90,7 @@ 3.0.0 3.1.0-RC1 + 5.0.0-RC2 11.0.2 4.0 2.9.4 @@ -854,6 +855,16 @@ 4.11 + org.junit.jupiter + junit-jupiter-api + ${junit5.version} + + + org.junit.jupiter + junit-jupiter-params + ${junit5.version} + + commons-lang commons-lang 2.6 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml index f17cf8c500b..08c7e6c30db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml @@ -129,8 +129,13 @@ protobuf-java - junit - junit + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-params test diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java index 82170b31342..aacc2642900 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/BasePBImplRecordsTest.java @@ -22,12 +22,15 @@ import com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.junit.Assert; import java.lang.reflect.*; import java.nio.ByteBuffer; import java.util.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; + /** * Generic helper class to validate protocol records. 
*/ @@ -191,7 +194,7 @@ public String toString() { p.getMethod = m; ret.put(propertyName, p); } else { - Assert.fail("Multiple get method with same name: " + recordClass + fail("Multiple get method with same name: " + recordClass + p.propertyName); } } @@ -247,18 +250,18 @@ public String toString() { gsp.setMethod.invoke(origRecord, gsp.testValue); } Object ret = getProto.invoke(origRecord); - Assert.assertNotNull(recordClass.getName() + "#getProto returns null", ret); + assertNotNull(ret, recordClass.getName() + "#getProto returns null"); if (!(protoClass.isAssignableFrom(ret.getClass()))) { - Assert.fail("Illegal getProto method return type: " + ret.getClass()); + fail("Illegal getProto method return type: " + ret.getClass()); } R deserRecord = pbConstructor.newInstance(ret); - Assert.assertEquals("whole " + recordClass + " records should be equal", - origRecord, deserRecord); + assertEquals(origRecord, deserRecord, + "whole " + recordClass + " records should be equal"); for (GetSetPair gsp : getSetPairs.values()) { Object origValue = gsp.getMethod.invoke(origRecord); Object deserValue = gsp.getMethod.invoke(deserRecord); - Assert.assertEquals("property " + recordClass.getName() + "#" - + gsp.propertyName + " should be equal", origValue, deserValue); + assertEquals(origValue, deserValue, "property " + recordClass.getName() + + "#" + gsp.propertyName + " should be equal"); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java index f54ed78ac1e..97a91348e90 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java @@ -19,41 +19,43 @@ package org.apache.hadoop.yarn.api; -import org.junit.Assert; - import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class TestApplicationAttemptId { +class TestApplicationAttemptId { @Test - public void testApplicationAttemptId() { + void testApplicationAttemptId() { ApplicationAttemptId a1 = createAppAttemptId(10l, 1, 1); ApplicationAttemptId a2 = createAppAttemptId(10l, 1, 2); ApplicationAttemptId a3 = createAppAttemptId(10l, 2, 1); ApplicationAttemptId a4 = createAppAttemptId(8l, 1, 4); ApplicationAttemptId a5 = createAppAttemptId(10l, 1, 1); - Assert.assertTrue(a1.equals(a5)); - Assert.assertFalse(a1.equals(a2)); - Assert.assertFalse(a1.equals(a3)); - Assert.assertFalse(a1.equals(a4)); + assertTrue(a1.equals(a5)); + assertFalse(a1.equals(a2)); + assertFalse(a1.equals(a3)); + assertFalse(a1.equals(a4)); - Assert.assertTrue(a1.compareTo(a5) == 0); - Assert.assertTrue(a1.compareTo(a2) < 0); - Assert.assertTrue(a1.compareTo(a3) < 0); - Assert.assertTrue(a1.compareTo(a4) > 0); + assertTrue(a1.compareTo(a5) == 0); + assertTrue(a1.compareTo(a2) < 0); + assertTrue(a1.compareTo(a3) < 0); + assertTrue(a1.compareTo(a4) > 0); - Assert.assertTrue(a1.hashCode() == a5.hashCode()); - Assert.assertFalse(a1.hashCode() == a2.hashCode()); - 
Assert.assertFalse(a1.hashCode() == a3.hashCode()); - Assert.assertFalse(a1.hashCode() == a4.hashCode()); + assertTrue(a1.hashCode() == a5.hashCode()); + assertFalse(a1.hashCode() == a2.hashCode()); + assertFalse(a1.hashCode() == a3.hashCode()); + assertFalse(a1.hashCode() == a4.hashCode()); long ts = System.currentTimeMillis(); ApplicationAttemptId a6 = createAppAttemptId(ts, 543627, 33492611); - Assert.assertEquals("appattempt_10_0001_000001", a1.toString()); - Assert.assertEquals("appattempt_" + ts + "_543627_33492611", a6.toString()); + assertEquals("appattempt_10_0001_000001", a1.toString()); + assertEquals("appattempt_" + ts + "_543627_33492611", a6.toString()); } private ApplicationAttemptId createAppAttemptId( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java index ea25a64c95d..3cdf31c0aac 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java @@ -18,36 +18,38 @@ package org.apache.hadoop.yarn.api; -import org.junit.Assert; - import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class TestApplicationId { +class TestApplicationId { @Test - public void testApplicationId() { + void testApplicationId() { ApplicationId a1 = ApplicationId.newInstance(10l, 1); ApplicationId a2 = ApplicationId.newInstance(10l, 2); ApplicationId a3 = ApplicationId.newInstance(10l, 1); ApplicationId a4 = ApplicationId.newInstance(8l, 3); - Assert.assertFalse(a1.equals(a2)); - Assert.assertFalse(a1.equals(a4)); - Assert.assertTrue(a1.equals(a3)); + assertFalse(a1.equals(a2)); + assertFalse(a1.equals(a4)); + assertTrue(a1.equals(a3)); - Assert.assertTrue(a1.compareTo(a2) < 0); - Assert.assertTrue(a1.compareTo(a3) == 0); - Assert.assertTrue(a1.compareTo(a4) > 0); + assertTrue(a1.compareTo(a2) < 0); + assertTrue(a1.compareTo(a3) == 0); + assertTrue(a1.compareTo(a4) > 0); - Assert.assertTrue(a1.hashCode() == a3.hashCode()); - Assert.assertFalse(a1.hashCode() == a2.hashCode()); - Assert.assertFalse(a2.hashCode() == a4.hashCode()); + assertTrue(a1.hashCode() == a3.hashCode()); + assertFalse(a1.hashCode() == a2.hashCode()); + assertFalse(a2.hashCode() == a4.hashCode()); long ts = System.currentTimeMillis(); ApplicationId a5 = ApplicationId.newInstance(ts, 45436343); - Assert.assertEquals("application_10_0001", a1.toString()); - Assert.assertEquals("application_" + ts + "_45436343", a5.toString()); + assertEquals("application_10_0001", a1.toString()); + assertEquals("application_" + ts + "_45436343", a5.toString()); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicatonReport.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicatonReport.java index 46fc4d58d92..db1f23ff533 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicatonReport.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicatonReport.java @@ -25,13 +25,16 @@ import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestApplicatonReport { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; + +class TestApplicatonReport { @Test - public void testApplicationReport() { + void testApplicationReport() { long timestamp = System.currentTimeMillis(); ApplicationReport appReport1 = createApplicationReport(1, 1, timestamp); @@ -39,15 +42,15 @@ public void testApplicationReport() { createApplicationReport(1, 1, timestamp); ApplicationReport appReport3 = createApplicationReport(1, 1, timestamp); - Assert.assertEquals(appReport1, appReport2); - Assert.assertEquals(appReport2, appReport3); + assertEquals(appReport1, appReport2); + assertEquals(appReport2, appReport3); appReport1.setApplicationId(null); - Assert.assertNull(appReport1.getApplicationId()); - Assert.assertNotSame(appReport1, appReport2); + assertNull(appReport1.getApplicationId()); + assertNotSame(appReport1, appReport2); appReport2.setCurrentApplicationAttemptId(null); - Assert.assertNull(appReport2.getCurrentApplicationAttemptId()); - Assert.assertNotSame(appReport2, appReport3); - Assert.assertNull(appReport1.getAMRMToken()); + assertNull(appReport2.getCurrentApplicationAttemptId()); + assertNotSame(appReport2, appReport3); + assertNull(appReport1.getAMRMToken()); } protected static ApplicationReport createApplicationReport( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java index 1643301072b..c16e8276e09 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java @@ -19,57 +19,59 @@ package org.apache.hadoop.yarn.api; -import org.junit.Assert; - import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestContainerId { @Test - public void testContainerId() { + void testContainerId() { ContainerId c1 = newContainerId(1, 1, 10l, 1); ContainerId c2 = newContainerId(1, 1, 10l, 2); ContainerId c3 = newContainerId(1, 1, 10l, 1); ContainerId c4 = newContainerId(1, 3, 10l, 1); ContainerId c5 = newContainerId(1, 3, 8l, 1); - Assert.assertTrue(c1.equals(c3)); - Assert.assertFalse(c1.equals(c2)); - Assert.assertFalse(c1.equals(c4)); - Assert.assertFalse(c1.equals(c5)); + assertTrue(c1.equals(c3)); + assertFalse(c1.equals(c2)); + assertFalse(c1.equals(c4)); + assertFalse(c1.equals(c5)); - Assert.assertTrue(c1.compareTo(c3) == 0); - Assert.assertTrue(c1.compareTo(c2) < 0); -
Assert.assertTrue(c1.compareTo(c4) < 0); - Assert.assertTrue(c1.compareTo(c5) > 0); + assertTrue(c1.compareTo(c3) == 0); + assertTrue(c1.compareTo(c2) < 0); + assertTrue(c1.compareTo(c4) < 0); + assertTrue(c1.compareTo(c5) > 0); - Assert.assertTrue(c1.hashCode() == c3.hashCode()); - Assert.assertFalse(c1.hashCode() == c2.hashCode()); - Assert.assertFalse(c1.hashCode() == c4.hashCode()); - Assert.assertFalse(c1.hashCode() == c5.hashCode()); + assertTrue(c1.hashCode() == c3.hashCode()); + assertFalse(c1.hashCode() == c2.hashCode()); + assertFalse(c1.hashCode() == c4.hashCode()); + assertFalse(c1.hashCode() == c5.hashCode()); long ts = System.currentTimeMillis(); ContainerId c6 = newContainerId(36473, 4365472, ts, 25645811); - Assert.assertEquals("container_10_0001_01_000001", c1.toString()); - Assert.assertEquals(25645811, 0xffffffffffL & c6.getContainerId()); - Assert.assertEquals(0, c6.getContainerId() >> 40); - Assert.assertEquals("container_" + ts + "_36473_4365472_25645811", + assertEquals("container_10_0001_01_000001", c1.toString()); + assertEquals(25645811, 0xffffffffffL & c6.getContainerId()); + assertEquals(0, c6.getContainerId() >> 40); + assertEquals("container_" + ts + "_36473_4365472_25645811", c6.toString()); ContainerId c7 = newContainerId(36473, 4365472, ts, 4298334883325L); - Assert.assertEquals(999799999997L, 0xffffffffffL & c7.getContainerId()); - Assert.assertEquals(3, c7.getContainerId() >> 40); - Assert.assertEquals( + assertEquals(999799999997L, 0xffffffffffL & c7.getContainerId()); + assertEquals(3, c7.getContainerId() >> 40); + assertEquals( "container_e03_" + ts + "_36473_4365472_999799999997", c7.toString()); ContainerId c8 = newContainerId(36473, 4365472, ts, 844424930131965L); - Assert.assertEquals(1099511627773L, 0xffffffffffL & c8.getContainerId()); - Assert.assertEquals(767, c8.getContainerId() >> 40); - Assert.assertEquals( + assertEquals(1099511627773L, 0xffffffffffL & c8.getContainerId()); + assertEquals(767, c8.getContainerId() >> 40); + assertEquals( "container_e767_" + ts + "_36473_4365472_1099511627773", c8.toString()); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestGetApplicationsRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestGetApplicationsRequest.java index 3d95a0fb573..7544ad53c9c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestGetApplicationsRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestGetApplicationsRequest.java @@ -26,13 +26,14 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl; import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestGetApplicationsRequest { +import static org.junit.jupiter.api.Assertions.assertEquals; + +class TestGetApplicationsRequest { @Test - public void testGetApplicationsRequest(){ + void testGetApplicationsRequest(){ GetApplicationsRequest request = GetApplicationsRequest.newInstance(); EnumSet appStates = @@ -73,40 +74,34 @@ public void testGetApplicationsRequest(){ ((GetApplicationsRequestPBImpl)request).getProto()); // verify the whole record equals with original record - Assert.assertEquals(requestFromProto, request); + 
assertEquals(requestFromProto, request); // verify all properties are the same as original request - Assert.assertEquals( - "ApplicationStates from proto is not the same with original request", - requestFromProto.getApplicationStates(), appStates); + assertEquals(requestFromProto.getApplicationStates(), appStates, + "ApplicationStates from proto is not the same with original request"); - Assert.assertEquals( - "ApplicationTags from proto is not the same with original request", - requestFromProto.getApplicationTags(), tags); + assertEquals(requestFromProto.getApplicationTags(), tags, + "ApplicationTags from proto is not the same with original request"); - Assert.assertEquals( - "ApplicationTypes from proto is not the same with original request", - requestFromProto.getApplicationTypes(), types); + assertEquals(requestFromProto.getApplicationTypes(), types, + "ApplicationTypes from proto is not the same with original request"); - Assert.assertEquals( - "StartRange from proto is not the same with original request", - requestFromProto.getStartRange(), new LongRange(startBegin, startEnd)); + assertEquals(requestFromProto.getStartRange(), + new LongRange(startBegin, startEnd), + "StartRange from proto is not the same with original request"); - Assert.assertEquals( - "FinishRange from proto is not the same with original request", - requestFromProto.getFinishRange(), new LongRange(finishBegin, finishEnd)); + assertEquals(requestFromProto.getFinishRange(), + new LongRange(finishBegin, finishEnd), + "FinishRange from proto is not the same with original request"); - Assert.assertEquals( - "Limit from proto is not the same with original request", - requestFromProto.getLimit(), limit); + assertEquals(requestFromProto.getLimit(), limit, + "Limit from proto is not the same with original request"); - Assert.assertEquals( - "Queues from proto is not the same with original request", - requestFromProto.getQueues(), queues); + assertEquals(requestFromProto.getQueues(), queues, + "Queues from proto is not the same with original request"); - Assert.assertEquals( - "Users from proto is not the same with original request", - requestFromProto.getUsers(), users); + assertEquals(requestFromProto.getUsers(), users, + "Users from proto is not the same with original request"); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java index 32d31a30b02..30457b8f80c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java @@ -18,32 +18,34 @@ package org.apache.hadoop.yarn.api; -import org.junit.Assert; - import org.apache.hadoop.yarn.api.records.NodeId; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class TestNodeId { +class TestNodeId { @Test - public void testNodeId() { + void testNodeId() { NodeId nodeId1 = NodeId.newInstance("10.18.52.124", 8041); NodeId nodeId2 = NodeId.newInstance("10.18.52.125", 8038); NodeId nodeId3 = NodeId.newInstance("10.18.52.124", 8041); NodeId nodeId4 = NodeId.newInstance("10.18.52.124", 8039); - Assert.assertTrue(nodeId1.equals(nodeId3)); - 
Assert.assertFalse(nodeId1.equals(nodeId2)); - Assert.assertFalse(nodeId3.equals(nodeId4)); + assertTrue(nodeId1.equals(nodeId3)); + assertFalse(nodeId1.equals(nodeId2)); + assertFalse(nodeId3.equals(nodeId4)); - Assert.assertTrue(nodeId1.compareTo(nodeId3) == 0); - Assert.assertTrue(nodeId1.compareTo(nodeId2) < 0); - Assert.assertTrue(nodeId3.compareTo(nodeId4) > 0); + assertTrue(nodeId1.compareTo(nodeId3) == 0); + assertTrue(nodeId1.compareTo(nodeId2) < 0); + assertTrue(nodeId3.compareTo(nodeId4) > 0); - Assert.assertTrue(nodeId1.hashCode() == nodeId3.hashCode()); - Assert.assertFalse(nodeId1.hashCode() == nodeId2.hashCode()); - Assert.assertFalse(nodeId3.hashCode() == nodeId4.hashCode()); + assertTrue(nodeId1.hashCode() == nodeId3.hashCode()); + assertFalse(nodeId1.hashCode() == nodeId2.hashCode()); + assertFalse(nodeId3.hashCode() == nodeId4.hashCode()); - Assert.assertEquals("10.18.52.124:8041", nodeId1.toString()); + assertEquals("10.18.52.124:8041", nodeId1.toString()); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java index bb688c93a9f..6fc1eecd5e7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java @@ -325,20 +325,19 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl; import org.apache.hadoop.yarn.util.resource.Resources; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; import com.google.common.collect.ImmutableSet; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; /** * Test class for YARN API protocol records.
*/ -public class TestPBImplRecords extends BasePBImplRecordsTest { +class TestPBImplRecords extends BasePBImplRecordsTest { - @BeforeClass - public static void setup() throws Exception { + @BeforeAll + static void setup() throws Exception { typeValueCache.put(LongRange.class, new LongRange(1000, 2000)); typeValueCache.put(URL.class, URL.newInstance( "http", "localhost", 8080, "file0")); @@ -409,293 +407,293 @@ public static void setup() throws Exception { } @Test - public void testAllocateRequestPBImpl() throws Exception { + void testAllocateRequestPBImpl() throws Exception { validatePBImplRecord(AllocateRequestPBImpl.class, AllocateRequestProto.class); } @Test - public void testAllocateResponsePBImpl() throws Exception { + void testAllocateResponsePBImpl() throws Exception { validatePBImplRecord(AllocateResponsePBImpl.class, AllocateResponseProto.class); } @Test - public void testCancelDelegationTokenRequestPBImpl() throws Exception { + void testCancelDelegationTokenRequestPBImpl() throws Exception { validatePBImplRecord(CancelDelegationTokenRequestPBImpl.class, CancelDelegationTokenRequestProto.class); } @Test - public void testCancelDelegationTokenResponsePBImpl() throws Exception { + void testCancelDelegationTokenResponsePBImpl() throws Exception { validatePBImplRecord(CancelDelegationTokenResponsePBImpl.class, CancelDelegationTokenResponseProto.class); } @Test - public void testFinishApplicationMasterRequestPBImpl() throws Exception { + void testFinishApplicationMasterRequestPBImpl() throws Exception { validatePBImplRecord(FinishApplicationMasterRequestPBImpl.class, FinishApplicationMasterRequestProto.class); } @Test - public void testFinishApplicationMasterResponsePBImpl() throws Exception { + void testFinishApplicationMasterResponsePBImpl() throws Exception { validatePBImplRecord(FinishApplicationMasterResponsePBImpl.class, FinishApplicationMasterResponseProto.class); } @Test - public void testGetApplicationAttemptReportRequestPBImpl() throws Exception { + void testGetApplicationAttemptReportRequestPBImpl() throws Exception { validatePBImplRecord(GetApplicationAttemptReportRequestPBImpl.class, GetApplicationAttemptReportRequestProto.class); } @Test - public void testGetApplicationAttemptReportResponsePBImpl() throws Exception { + void testGetApplicationAttemptReportResponsePBImpl() throws Exception { validatePBImplRecord(GetApplicationAttemptReportResponsePBImpl.class, GetApplicationAttemptReportResponseProto.class); } @Test - public void testGetApplicationAttemptsRequestPBImpl() throws Exception { + void testGetApplicationAttemptsRequestPBImpl() throws Exception { validatePBImplRecord(GetApplicationAttemptsRequestPBImpl.class, GetApplicationAttemptsRequestProto.class); } @Test - public void testGetApplicationAttemptsResponsePBImpl() throws Exception { + void testGetApplicationAttemptsResponsePBImpl() throws Exception { validatePBImplRecord(GetApplicationAttemptsResponsePBImpl.class, GetApplicationAttemptsResponseProto.class); } @Test - public void testGetApplicationReportRequestPBImpl() throws Exception { + void testGetApplicationReportRequestPBImpl() throws Exception { validatePBImplRecord(GetApplicationReportRequestPBImpl.class, GetApplicationReportRequestProto.class); } @Test - public void testGetApplicationReportResponsePBImpl() throws Exception { + void testGetApplicationReportResponsePBImpl() throws Exception { validatePBImplRecord(GetApplicationReportResponsePBImpl.class, GetApplicationReportResponseProto.class); } @Test - public void testGetApplicationsRequestPBImpl() 
throws Exception { + void testGetApplicationsRequestPBImpl() throws Exception { validatePBImplRecord(GetApplicationsRequestPBImpl.class, GetApplicationsRequestProto.class); } @Test - public void testGetApplicationsResponsePBImpl() throws Exception { + void testGetApplicationsResponsePBImpl() throws Exception { validatePBImplRecord(GetApplicationsResponsePBImpl.class, GetApplicationsResponseProto.class); } @Test - public void testGetClusterMetricsRequestPBImpl() throws Exception { + void testGetClusterMetricsRequestPBImpl() throws Exception { validatePBImplRecord(GetClusterMetricsRequestPBImpl.class, GetClusterMetricsRequestProto.class); } @Test - public void testGetClusterMetricsResponsePBImpl() throws Exception { + void testGetClusterMetricsResponsePBImpl() throws Exception { validatePBImplRecord(GetClusterMetricsResponsePBImpl.class, GetClusterMetricsResponseProto.class); } @Test - public void testGetClusterNodesRequestPBImpl() throws Exception { + void testGetClusterNodesRequestPBImpl() throws Exception { validatePBImplRecord(GetClusterNodesRequestPBImpl.class, GetClusterNodesRequestProto.class); } @Test - public void testGetClusterNodesResponsePBImpl() throws Exception { + void testGetClusterNodesResponsePBImpl() throws Exception { validatePBImplRecord(GetClusterNodesResponsePBImpl.class, GetClusterNodesResponseProto.class); } @Test - public void testGetContainerReportRequestPBImpl() throws Exception { + void testGetContainerReportRequestPBImpl() throws Exception { validatePBImplRecord(GetContainerReportRequestPBImpl.class, GetContainerReportRequestProto.class); } @Test - public void testGetContainerReportResponsePBImpl() throws Exception { + void testGetContainerReportResponsePBImpl() throws Exception { validatePBImplRecord(GetContainerReportResponsePBImpl.class, GetContainerReportResponseProto.class); } @Test - public void testGetContainersRequestPBImpl() throws Exception { + void testGetContainersRequestPBImpl() throws Exception { validatePBImplRecord(GetContainersRequestPBImpl.class, GetContainersRequestProto.class); } @Test - public void testGetContainersResponsePBImpl() throws Exception { + void testGetContainersResponsePBImpl() throws Exception { validatePBImplRecord(GetContainersResponsePBImpl.class, GetContainersResponseProto.class); } @Test - public void testGetContainerStatusesRequestPBImpl() throws Exception { + void testGetContainerStatusesRequestPBImpl() throws Exception { validatePBImplRecord(GetContainerStatusesRequestPBImpl.class, GetContainerStatusesRequestProto.class); } @Test - public void testGetContainerStatusesResponsePBImpl() throws Exception { + void testGetContainerStatusesResponsePBImpl() throws Exception { validatePBImplRecord(GetContainerStatusesResponsePBImpl.class, GetContainerStatusesResponseProto.class); } @Test - public void testGetDelegationTokenRequestPBImpl() throws Exception { + void testGetDelegationTokenRequestPBImpl() throws Exception { validatePBImplRecord(GetDelegationTokenRequestPBImpl.class, GetDelegationTokenRequestProto.class); } @Test - public void testGetDelegationTokenResponsePBImpl() throws Exception { + void testGetDelegationTokenResponsePBImpl() throws Exception { validatePBImplRecord(GetDelegationTokenResponsePBImpl.class, GetDelegationTokenResponseProto.class); } @Test - public void testGetNewApplicationRequestPBImpl() throws Exception { + void testGetNewApplicationRequestPBImpl() throws Exception { validatePBImplRecord(GetNewApplicationRequestPBImpl.class, GetNewApplicationRequestProto.class); } @Test - public void 
testGetNewApplicationResponsePBImpl() throws Exception { + void testGetNewApplicationResponsePBImpl() throws Exception { validatePBImplRecord(GetNewApplicationResponsePBImpl.class, GetNewApplicationResponseProto.class); } @Test - public void testGetQueueInfoRequestPBImpl() throws Exception { + void testGetQueueInfoRequestPBImpl() throws Exception { validatePBImplRecord(GetQueueInfoRequestPBImpl.class, GetQueueInfoRequestProto.class); } @Test - public void testGetQueueInfoResponsePBImpl() throws Exception { + void testGetQueueInfoResponsePBImpl() throws Exception { validatePBImplRecord(GetQueueInfoResponsePBImpl.class, GetQueueInfoResponseProto.class); } @Test - public void testGetQueueUserAclsInfoRequestPBImpl() throws Exception { + void testGetQueueUserAclsInfoRequestPBImpl() throws Exception { validatePBImplRecord(GetQueueUserAclsInfoRequestPBImpl.class, GetQueueUserAclsInfoRequestProto.class); } @Test - public void testGetQueueUserAclsInfoResponsePBImpl() throws Exception { + void testGetQueueUserAclsInfoResponsePBImpl() throws Exception { validatePBImplRecord(GetQueueUserAclsInfoResponsePBImpl.class, GetQueueUserAclsInfoResponseProto.class); } @Test - public void testKillApplicationRequestPBImpl() throws Exception { + void testKillApplicationRequestPBImpl() throws Exception { validatePBImplRecord(KillApplicationRequestPBImpl.class, KillApplicationRequestProto.class); } @Test - public void testKillApplicationResponsePBImpl() throws Exception { + void testKillApplicationResponsePBImpl() throws Exception { validatePBImplRecord(KillApplicationResponsePBImpl.class, KillApplicationResponseProto.class); } @Test - public void testMoveApplicationAcrossQueuesRequestPBImpl() throws Exception { + void testMoveApplicationAcrossQueuesRequestPBImpl() throws Exception { validatePBImplRecord(MoveApplicationAcrossQueuesRequestPBImpl.class, MoveApplicationAcrossQueuesRequestProto.class); } @Test - public void testMoveApplicationAcrossQueuesResponsePBImpl() throws Exception { + void testMoveApplicationAcrossQueuesResponsePBImpl() throws Exception { validatePBImplRecord(MoveApplicationAcrossQueuesResponsePBImpl.class, MoveApplicationAcrossQueuesResponseProto.class); } @Test - public void testRegisterApplicationMasterRequestPBImpl() throws Exception { + void testRegisterApplicationMasterRequestPBImpl() throws Exception { validatePBImplRecord(RegisterApplicationMasterRequestPBImpl.class, RegisterApplicationMasterRequestProto.class); } @Test - public void testRegisterApplicationMasterResponsePBImpl() throws Exception { + void testRegisterApplicationMasterResponsePBImpl() throws Exception { validatePBImplRecord(RegisterApplicationMasterResponsePBImpl.class, RegisterApplicationMasterResponseProto.class); } @Test - public void testRenewDelegationTokenRequestPBImpl() throws Exception { + void testRenewDelegationTokenRequestPBImpl() throws Exception { validatePBImplRecord(RenewDelegationTokenRequestPBImpl.class, RenewDelegationTokenRequestProto.class); } @Test - public void testRenewDelegationTokenResponsePBImpl() throws Exception { + void testRenewDelegationTokenResponsePBImpl() throws Exception { validatePBImplRecord(RenewDelegationTokenResponsePBImpl.class, RenewDelegationTokenResponseProto.class); } @Test - public void testStartContainerRequestPBImpl() throws Exception { + void testStartContainerRequestPBImpl() throws Exception { validatePBImplRecord(StartContainerRequestPBImpl.class, StartContainerRequestProto.class); } @Test - public void testStartContainersRequestPBImpl() throws Exception { + void 
testStartContainersRequestPBImpl() throws Exception { validatePBImplRecord(StartContainersRequestPBImpl.class, StartContainersRequestProto.class); } @Test - public void testStartContainersResponsePBImpl() throws Exception { + void testStartContainersResponsePBImpl() throws Exception { validatePBImplRecord(StartContainersResponsePBImpl.class, StartContainersResponseProto.class); } @Test - public void testStopContainersRequestPBImpl() throws Exception { + void testStopContainersRequestPBImpl() throws Exception { validatePBImplRecord(StopContainersRequestPBImpl.class, StopContainersRequestProto.class); } @Test - public void testStopContainersResponsePBImpl() throws Exception { + void testStopContainersResponsePBImpl() throws Exception { validatePBImplRecord(StopContainersResponsePBImpl.class, StopContainersResponseProto.class); } @Test - public void testIncreaseContainersResourceRequestPBImpl() throws Exception { + void testIncreaseContainersResourceRequestPBImpl() throws Exception { validatePBImplRecord(IncreaseContainersResourceRequestPBImpl.class, IncreaseContainersResourceRequestProto.class); } @Test - public void testIncreaseContainersResourceResponsePBImpl() throws Exception { + void testIncreaseContainersResourceResponsePBImpl() throws Exception { validatePBImplRecord(IncreaseContainersResourceResponsePBImpl.class, IncreaseContainersResourceResponseProto.class); } @Test - public void testSubmitApplicationRequestPBImpl() throws Exception { + void testSubmitApplicationRequestPBImpl() throws Exception { validatePBImplRecord(SubmitApplicationRequestPBImpl.class, SubmitApplicationRequestProto.class); } @Test - public void testSubmitApplicationResponsePBImpl() throws Exception { + void testSubmitApplicationResponsePBImpl() throws Exception { validatePBImplRecord(SubmitApplicationResponsePBImpl.class, SubmitApplicationResponseProto.class); } @@ -703,13 +701,13 @@ public void testSubmitApplicationResponsePBImpl() throws Exception { @Test - @Ignore // ignore cause ApplicationIdPBImpl is immutable - public void testApplicationAttemptIdPBImpl() throws Exception { + @Disabled // ignore cause ApplicationIdPBImpl is immutable + void testApplicationAttemptIdPBImpl() throws Exception { validatePBImplRecord(ApplicationAttemptIdPBImpl.class, ApplicationAttemptIdProto.class); } @Test - public void testApplicationAttemptReportPBImpl() throws Exception { + void testApplicationAttemptReportPBImpl() throws Exception { validatePBImplRecord(ApplicationAttemptReportPBImpl.class, ApplicationAttemptReportProto.class); } @@ -717,24 +715,24 @@ public void testApplicationAttemptReportPBImpl() throws Exception { @Test - @Ignore // ignore cause ApplicationIdPBImpl is immutable - public void testApplicationIdPBImpl() throws Exception { + @Disabled // ignore cause ApplicationIdPBImpl is immutable + void testApplicationIdPBImpl() throws Exception { validatePBImplRecord(ApplicationIdPBImpl.class, ApplicationIdProto.class); } @Test - public void testApplicationReportPBImpl() throws Exception { + void testApplicationReportPBImpl() throws Exception { validatePBImplRecord(ApplicationReportPBImpl.class, ApplicationReportProto.class); } @Test - public void testApplicationResourceUsageReportPBImpl() throws Exception { + void testApplicationResourceUsageReportPBImpl() throws Exception { validatePBImplRecord(ApplicationResourceUsageReportPBImpl.class, ApplicationResourceUsageReportProto.class); } @Test - public void testApplicationSubmissionContextPBImpl() throws Exception { + void testApplicationSubmissionContextPBImpl() throws Exception { validatePBImplRecord(ApplicationSubmissionContextPBImpl.class, ApplicationSubmissionContextProto.class); @@
-748,119 +746,119 @@ public void testApplicationSubmissionContextPBImpl() throws Exception { @Test - @Ignore // ignore cause ApplicationIdPBImpl is immutable - public void testContainerIdPBImpl() throws Exception { + @Disabled // ignore cause ApplicationIdPBImpl is immutable + void testContainerIdPBImpl() throws Exception { validatePBImplRecord(ContainerIdPBImpl.class, ContainerIdProto.class); } @Test - public void testContainerRetryPBImpl() throws Exception { + void testContainerRetryPBImpl() throws Exception { validatePBImplRecord(ContainerRetryContextPBImpl.class, ContainerRetryContextProto.class); } @Test - public void testContainerLaunchContextPBImpl() throws Exception { + void testContainerLaunchContextPBImpl() throws Exception { validatePBImplRecord(ContainerLaunchContextPBImpl.class, ContainerLaunchContextProto.class); } @Test - public void testResourceLocalizationRequest() throws Exception { + void testResourceLocalizationRequest() throws Exception { validatePBImplRecord(ResourceLocalizationRequestPBImpl.class, YarnServiceProtos.ResourceLocalizationRequestProto.class); } @Test - public void testResourceLocalizationResponse() throws Exception { + void testResourceLocalizationResponse() throws Exception { validatePBImplRecord(ResourceLocalizationResponsePBImpl.class, YarnServiceProtos.ResourceLocalizationResponseProto.class); } @Test - public void testContainerPBImpl() throws Exception { + void testContainerPBImpl() throws Exception { validatePBImplRecord(ContainerPBImpl.class, ContainerProto.class); } @Test - public void testContainerReportPBImpl() throws Exception { + void testContainerReportPBImpl() throws Exception { validatePBImplRecord(ContainerReportPBImpl.class, ContainerReportProto.class); } @Test - public void testUpdateContainerRequestPBImpl() throws Exception { + void testUpdateContainerRequestPBImpl() throws Exception { validatePBImplRecord(UpdateContainerRequestPBImpl.class, YarnServiceProtos.UpdateContainerRequestProto.class); } @Test - public void testContainerStatusPBImpl() throws Exception { + void testContainerStatusPBImpl() throws Exception { validatePBImplRecord(ContainerStatusPBImpl.class, ContainerStatusProto.class); } @Test - public void testLocalResourcePBImpl() throws Exception { + void testLocalResourcePBImpl() throws Exception { validatePBImplRecord(LocalResourcePBImpl.class, LocalResourceProto.class); } @Test - public void testNMTokenPBImpl() throws Exception { + void testNMTokenPBImpl() throws Exception { validatePBImplRecord(NMTokenPBImpl.class, NMTokenProto.class); } @Test - @Ignore // ignore cause ApplicationIdPBImpl is immutable - public void testNodeIdPBImpl() throws Exception { + @Disabled // ignore cause ApplicationIdPBImpl is immutable + void testNodeIdPBImpl() throws Exception { validatePBImplRecord(NodeIdPBImpl.class, NodeIdProto.class); } @Test - public void testNodeReportPBImpl() throws Exception { + void testNodeReportPBImpl() throws Exception { validatePBImplRecord(NodeReportPBImpl.class, NodeReportProto.class); } @Test - public void testPreemptionContainerPBImpl() throws Exception { + void testPreemptionContainerPBImpl() throws Exception { validatePBImplRecord(PreemptionContainerPBImpl.class, PreemptionContainerProto.class); } @Test - public void testPreemptionContractPBImpl() throws Exception { + void testPreemptionContractPBImpl() throws Exception { validatePBImplRecord(PreemptionContractPBImpl.class, PreemptionContractProto.class); } @Test - public void testPreemptionMessagePBImpl() throws Exception { + void testPreemptionMessagePBImpl() throws Exception { validatePBImplRecord(PreemptionMessagePBImpl.class, PreemptionMessageProto.class); } @Test - public void
testPreemptionResourceRequestPBImpl() throws Exception { + void testPreemptionResourceRequestPBImpl() throws Exception { validatePBImplRecord(PreemptionResourceRequestPBImpl.class, PreemptionResourceRequestProto.class); } @Test - public void testPriorityPBImpl() throws Exception { + void testPriorityPBImpl() throws Exception { validatePBImplRecord(PriorityPBImpl.class, PriorityProto.class); } @Test - public void testQueueInfoPBImpl() throws Exception { + void testQueueInfoPBImpl() throws Exception { validatePBImplRecord(QueueInfoPBImpl.class, QueueInfoProto.class); } @Test - public void testQueueUserACLInfoPBImpl() throws Exception { + void testQueueUserACLInfoPBImpl() throws Exception { validatePBImplRecord(QueueUserACLInfoPBImpl.class, QueueUserACLInfoProto.class); } @Test - public void testResourceBlacklistRequestPBImpl() throws Exception { + void testResourceBlacklistRequestPBImpl() throws Exception { validatePBImplRecord(ResourceBlacklistRequestPBImpl.class, ResourceBlacklistRequestProto.class); } @@ -868,286 +866,286 @@ public void testResourceBlacklistRequestPBImpl() throws Exception { @Test - @Ignore // ignore as ResourceOptionPBImpl is immutable - public void testResourceOptionPBImpl() throws Exception { + @Disabled // ignore as ResourceOptionPBImpl is immutable + void testResourceOptionPBImpl() throws Exception { validatePBImplRecord(ResourceOptionPBImpl.class, ResourceOptionProto.class); } @Test - public void testResourcePBImpl() throws Exception { + void testResourcePBImpl() throws Exception { validatePBImplRecord(ResourcePBImpl.class, ResourceProto.class); } @Test - public void testResourceRequestPBImpl() throws Exception { + void testResourceRequestPBImpl() throws Exception { validatePBImplRecord(ResourceRequestPBImpl.class, ResourceRequestProto.class); } @Test - public void testSerializedExceptionPBImpl() throws Exception { + void testSerializedExceptionPBImpl() throws Exception { validatePBImplRecord(SerializedExceptionPBImpl.class, SerializedExceptionProto.class); } @Test - public void testStrictPreemptionContractPBImpl() throws Exception { + void testStrictPreemptionContractPBImpl() throws Exception { validatePBImplRecord(StrictPreemptionContractPBImpl.class, StrictPreemptionContractProto.class); } @Test - public void testTokenPBImpl() throws Exception { + void testTokenPBImpl() throws Exception { validatePBImplRecord(TokenPBImpl.class, TokenProto.class); } @Test - public void testURLPBImpl() throws Exception { + void testURLPBImpl() throws Exception { validatePBImplRecord(URLPBImpl.class, URLProto.class); } @Test - public void testYarnClusterMetricsPBImpl() throws Exception { + void testYarnClusterMetricsPBImpl() throws Exception { validatePBImplRecord(YarnClusterMetricsPBImpl.class, YarnClusterMetricsProto.class); } @Test - public void testRefreshAdminAclsRequestPBImpl() throws Exception { + void testRefreshAdminAclsRequestPBImpl() throws Exception { validatePBImplRecord(RefreshAdminAclsRequestPBImpl.class, RefreshAdminAclsRequestProto.class); } @Test - public void testRefreshAdminAclsResponsePBImpl() throws Exception { + void testRefreshAdminAclsResponsePBImpl() throws Exception { validatePBImplRecord(RefreshAdminAclsResponsePBImpl.class, RefreshAdminAclsResponseProto.class); } @Test - public void testRefreshNodesRequestPBImpl() throws Exception { + void testRefreshNodesRequestPBImpl() throws Exception { validatePBImplRecord(RefreshNodesRequestPBImpl.class, RefreshNodesRequestProto.class); } @Test - public void testRefreshNodesResponsePBImpl() throws Exception { + void testRefreshNodesResponsePBImpl() throws Exception {
validatePBImplRecord(RefreshNodesResponsePBImpl.class, RefreshNodesResponseProto.class); } @Test - public void testRefreshQueuesRequestPBImpl() throws Exception { + void testRefreshQueuesRequestPBImpl() throws Exception { validatePBImplRecord(RefreshQueuesRequestPBImpl.class, RefreshQueuesRequestProto.class); } @Test - public void testRefreshQueuesResponsePBImpl() throws Exception { + void testRefreshQueuesResponsePBImpl() throws Exception { validatePBImplRecord(RefreshQueuesResponsePBImpl.class, RefreshQueuesResponseProto.class); } @Test - public void testRefreshNodesResourcesRequestPBImpl() throws Exception { + void testRefreshNodesResourcesRequestPBImpl() throws Exception { validatePBImplRecord(RefreshNodesResourcesRequestPBImpl.class, RefreshNodesResourcesRequestProto.class); } @Test - public void testRefreshNodesResourcesResponsePBImpl() throws Exception { + void testRefreshNodesResourcesResponsePBImpl() throws Exception { validatePBImplRecord(RefreshNodesResourcesResponsePBImpl.class, RefreshNodesResourcesResponseProto.class); } @Test - public void testRefreshServiceAclsRequestPBImpl() throws Exception { + void testRefreshServiceAclsRequestPBImpl() throws Exception { validatePBImplRecord(RefreshServiceAclsRequestPBImpl.class, RefreshServiceAclsRequestProto.class); } @Test - public void testRefreshServiceAclsResponsePBImpl() throws Exception { + void testRefreshServiceAclsResponsePBImpl() throws Exception { validatePBImplRecord(RefreshServiceAclsResponsePBImpl.class, RefreshServiceAclsResponseProto.class); } @Test - public void testRefreshSuperUserGroupsConfigurationRequestPBImpl() + void testRefreshSuperUserGroupsConfigurationRequestPBImpl() throws Exception { validatePBImplRecord(RefreshSuperUserGroupsConfigurationRequestPBImpl.class, RefreshSuperUserGroupsConfigurationRequestProto.class); } @Test - public void testRefreshSuperUserGroupsConfigurationResponsePBImpl() + void testRefreshSuperUserGroupsConfigurationResponsePBImpl() throws Exception { validatePBImplRecord(RefreshSuperUserGroupsConfigurationResponsePBImpl.class, RefreshSuperUserGroupsConfigurationResponseProto.class); } @Test - public void testRefreshUserToGroupsMappingsRequestPBImpl() throws Exception { + void testRefreshUserToGroupsMappingsRequestPBImpl() throws Exception { validatePBImplRecord(RefreshUserToGroupsMappingsRequestPBImpl.class, RefreshUserToGroupsMappingsRequestProto.class); } @Test - public void testRefreshUserToGroupsMappingsResponsePBImpl() throws Exception { + void testRefreshUserToGroupsMappingsResponsePBImpl() throws Exception { validatePBImplRecord(RefreshUserToGroupsMappingsResponsePBImpl.class, RefreshUserToGroupsMappingsResponseProto.class); } @Test - public void testUpdateNodeResourceRequestPBImpl() throws Exception { + void testUpdateNodeResourceRequestPBImpl() throws Exception { validatePBImplRecord(UpdateNodeResourceRequestPBImpl.class, UpdateNodeResourceRequestProto.class); } @Test - public void testUpdateNodeResourceResponsePBImpl() throws Exception { + void testUpdateNodeResourceResponsePBImpl() throws Exception { validatePBImplRecord(UpdateNodeResourceResponsePBImpl.class, UpdateNodeResourceResponseProto.class); } @Test - public void testReservationSubmissionRequestPBImpl() throws Exception { + void testReservationSubmissionRequestPBImpl() throws Exception { validatePBImplRecord(ReservationSubmissionRequestPBImpl.class, ReservationSubmissionRequestProto.class); } @Test - public void testReservationSubmissionResponsePBImpl() throws Exception { + void 
testReservationSubmissionResponsePBImpl() throws Exception { validatePBImplRecord(ReservationSubmissionResponsePBImpl.class, ReservationSubmissionResponseProto.class); } @Test - public void testReservationUpdateRequestPBImpl() throws Exception { + void testReservationUpdateRequestPBImpl() throws Exception { validatePBImplRecord(ReservationUpdateRequestPBImpl.class, ReservationUpdateRequestProto.class); } @Test - public void testReservationUpdateResponsePBImpl() throws Exception { + void testReservationUpdateResponsePBImpl() throws Exception { validatePBImplRecord(ReservationUpdateResponsePBImpl.class, ReservationUpdateResponseProto.class); } @Test - public void testReservationDeleteRequestPBImpl() throws Exception { + void testReservationDeleteRequestPBImpl() throws Exception { validatePBImplRecord(ReservationDeleteRequestPBImpl.class, ReservationDeleteRequestProto.class); } @Test - public void testReservationDeleteResponsePBImpl() throws Exception { + void testReservationDeleteResponsePBImpl() throws Exception { validatePBImplRecord(ReservationDeleteResponsePBImpl.class, ReservationDeleteResponseProto.class); } @Test - public void testReservationListRequestPBImpl() throws Exception { + void testReservationListRequestPBImpl() throws Exception { validatePBImplRecord(ReservationListRequestPBImpl.class, ReservationListRequestProto.class); } @Test - public void testReservationListResponsePBImpl() throws Exception { + void testReservationListResponsePBImpl() throws Exception { validatePBImplRecord(ReservationListResponsePBImpl.class, ReservationListResponseProto.class); } @Test - public void testAddToClusterNodeLabelsRequestPBImpl() throws Exception { + void testAddToClusterNodeLabelsRequestPBImpl() throws Exception { validatePBImplRecord(AddToClusterNodeLabelsRequestPBImpl.class, AddToClusterNodeLabelsRequestProto.class); } @Test - public void testAddToClusterNodeLabelsResponsePBImpl() throws Exception { + void testAddToClusterNodeLabelsResponsePBImpl() throws Exception { validatePBImplRecord(AddToClusterNodeLabelsResponsePBImpl.class, AddToClusterNodeLabelsResponseProto.class); } @Test - public void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception { + void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception { validatePBImplRecord(RemoveFromClusterNodeLabelsRequestPBImpl.class, RemoveFromClusterNodeLabelsRequestProto.class); } @Test - public void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception { + void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception { validatePBImplRecord(RemoveFromClusterNodeLabelsResponsePBImpl.class, RemoveFromClusterNodeLabelsResponseProto.class); } @Test - public void testGetClusterNodeLabelsRequestPBImpl() throws Exception { + void testGetClusterNodeLabelsRequestPBImpl() throws Exception { validatePBImplRecord(GetClusterNodeLabelsRequestPBImpl.class, GetClusterNodeLabelsRequestProto.class); } @Test - public void testGetClusterNodeLabelsResponsePBImpl() throws Exception { + void testGetClusterNodeLabelsResponsePBImpl() throws Exception { validatePBImplRecord(GetClusterNodeLabelsResponsePBImpl.class, GetClusterNodeLabelsResponseProto.class); } @Test - public void testReplaceLabelsOnNodeRequestPBImpl() throws Exception { + void testReplaceLabelsOnNodeRequestPBImpl() throws Exception { validatePBImplRecord(ReplaceLabelsOnNodeRequestPBImpl.class, ReplaceLabelsOnNodeRequestProto.class); } @Test - public void testReplaceLabelsOnNodeResponsePBImpl() throws Exception { + void testReplaceLabelsOnNodeResponsePBImpl() 
throws Exception { validatePBImplRecord(ReplaceLabelsOnNodeResponsePBImpl.class, ReplaceLabelsOnNodeResponseProto.class); } @Test - public void testGetNodeToLabelsRequestPBImpl() throws Exception { + void testGetNodeToLabelsRequestPBImpl() throws Exception { validatePBImplRecord(GetNodesToLabelsRequestPBImpl.class, GetNodesToLabelsRequestProto.class); } @Test - public void testGetNodeToLabelsResponsePBImpl() throws Exception { + void testGetNodeToLabelsResponsePBImpl() throws Exception { validatePBImplRecord(GetNodesToLabelsResponsePBImpl.class, GetNodesToLabelsResponseProto.class); } @Test - public void testGetLabelsToNodesRequestPBImpl() throws Exception { + void testGetLabelsToNodesRequestPBImpl() throws Exception { validatePBImplRecord(GetLabelsToNodesRequestPBImpl.class, GetLabelsToNodesRequestProto.class); } @Test - public void testGetLabelsToNodesResponsePBImpl() throws Exception { + void testGetLabelsToNodesResponsePBImpl() throws Exception { validatePBImplRecord(GetLabelsToNodesResponsePBImpl.class, GetLabelsToNodesResponseProto.class); } @Test - public void testNodeLabelAttributesPBImpl() throws Exception { + void testNodeLabelAttributesPBImpl() throws Exception { validatePBImplRecord(NodeLabelPBImpl.class, NodeLabelProto.class); } @Test - public void testCheckForDecommissioningNodesRequestPBImpl() throws Exception { + void testCheckForDecommissioningNodesRequestPBImpl() throws Exception { validatePBImplRecord(CheckForDecommissioningNodesRequestPBImpl.class, CheckForDecommissioningNodesRequestProto.class); } @Test - public void testCheckForDecommissioningNodesResponsePBImpl() throws Exception { + void testCheckForDecommissioningNodesResponsePBImpl() throws Exception { validatePBImplRecord(CheckForDecommissioningNodesResponsePBImpl.class, CheckForDecommissioningNodesResponseProto.class); } @Test - public void testExecutionTypeRequestPBImpl() throws Exception { + void testExecutionTypeRequestPBImpl() throws Exception { validatePBImplRecord(ExecutionTypeRequestPBImpl.class, ExecutionTypeRequestProto.class); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestTimelineEntityGroupId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestTimelineEntityGroupId.java index 55b149640d3..372b7668778 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestTimelineEntityGroupId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestTimelineEntityGroupId.java @@ -20,13 +20,16 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestTimelineEntityGroupId { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class TestTimelineEntityGroupId { @Test - public void testTimelineEntityGroupId() { + void testTimelineEntityGroupId() { ApplicationId appId1 = ApplicationId.newInstance(1234, 1); ApplicationId appId2 = ApplicationId.newInstance(1234, 2); TimelineEntityGroupId group1 = TimelineEntityGroupId.newInstance(appId1, "1"); @@ -34,19 +37,19 @@ public void testTimelineEntityGroupId() { TimelineEntityGroupId group3 = TimelineEntityGroupId.newInstance(appId2, "1"); TimelineEntityGroupId 
group4 = TimelineEntityGroupId.newInstance(appId1, "1"); - Assert.assertTrue(group1.equals(group4)); - Assert.assertFalse(group1.equals(group2)); - Assert.assertFalse(group1.equals(group3)); + assertTrue(group1.equals(group4)); + assertFalse(group1.equals(group2)); + assertFalse(group1.equals(group3)); - Assert.assertTrue(group1.compareTo(group4) == 0); - Assert.assertTrue(group1.compareTo(group2) < 0); - Assert.assertTrue(group1.compareTo(group3) < 0); + assertTrue(group1.compareTo(group4) == 0); + assertTrue(group1.compareTo(group2) < 0); + assertTrue(group1.compareTo(group3) < 0); - Assert.assertTrue(group1.hashCode() == group4.hashCode()); - Assert.assertFalse(group1.hashCode() == group2.hashCode()); - Assert.assertFalse(group1.hashCode() == group3.hashCode()); + assertTrue(group1.hashCode() == group4.hashCode()); + assertFalse(group1.hashCode() == group2.hashCode()); + assertFalse(group1.hashCode() == group3.hashCode()); - Assert.assertEquals("timelineEntityGroupId_1234_1_1", group1.toString()); - Assert.assertEquals(TimelineEntityGroupId.fromString("timelineEntityGroupId_1234_1_1"), group1); + assertEquals("timelineEntityGroupId_1234_1_1", group1.toString()); + assertEquals(TimelineEntityGroupId.fromString("timelineEntityGroupId_1234_1_1"), group1); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java index 5934846e2f3..d07ad050d7e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/TestResourceUtilization.java @@ -18,46 +18,50 @@ package org.apache.hadoop.yarn.api.records; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestResourceUtilization { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class TestResourceUtilization { @Test - public void testResourceUtilization() { + void testResourceUtilization() { ResourceUtilization u1 = ResourceUtilization.newInstance(10, 20, 0.5f); ResourceUtilization u2 = ResourceUtilization.newInstance(u1); ResourceUtilization u3 = ResourceUtilization.newInstance(10, 20, 0.5f); ResourceUtilization u4 = ResourceUtilization.newInstance(20, 20, 0.5f); ResourceUtilization u5 = ResourceUtilization.newInstance(30, 40, 0.8f); - Assert.assertEquals(u1, u2); - Assert.assertEquals(u1, u3); - Assert.assertNotEquals(u1, u4); - Assert.assertNotEquals(u2, u5); - Assert.assertNotEquals(u4, u5); + assertEquals(u1, u2); + assertEquals(u1, u3); + assertNotEquals(u1, u4); + assertNotEquals(u2, u5); + assertNotEquals(u4, u5); - Assert.assertTrue(u1.hashCode() == u2.hashCode()); - Assert.assertTrue(u1.hashCode() == u3.hashCode()); - Assert.assertFalse(u1.hashCode() == u4.hashCode()); - Assert.assertFalse(u2.hashCode() == u5.hashCode()); - Assert.assertFalse(u4.hashCode() == u5.hashCode()); + assertTrue(u1.hashCode() == u2.hashCode()); + assertTrue(u1.hashCode() == u3.hashCode()); + assertFalse(u1.hashCode() == u4.hashCode()); + assertFalse(u2.hashCode() == u5.hashCode()); + assertFalse(u4.hashCode() == 
u5.hashCode()); - Assert.assertTrue(u1.getPhysicalMemory() == 10); - Assert.assertFalse(u1.getVirtualMemory() == 10); - Assert.assertTrue(u1.getCPU() == 0.5f); + assertTrue(u1.getPhysicalMemory() == 10); + assertFalse(u1.getVirtualMemory() == 10); + assertTrue(u1.getCPU() == 0.5f); - Assert.assertEquals("", u1.toString()); u1.addTo(10, 0, 0.0f); - Assert.assertNotEquals(u1, u2); - Assert.assertEquals(u1, u4); + assertNotEquals(u1, u2); + assertEquals(u1, u4); u1.addTo(10, 20, 0.3f); - Assert.assertEquals(u1, u5); + assertEquals(u1, u5); u1.subtractFrom(10, 20, 0.3f); - Assert.assertEquals(u1, u4); + assertEquals(u1, u4); u1.subtractFrom(10, 0, 0.0f); - Assert.assertEquals(u1, u3); + assertEquals(u1, u3); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestApplicationClientProtocolRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestApplicationClientProtocolRecords.java index 6c51516434e..a3955b21a06 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestApplicationClientProtocolRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestApplicationClientProtocolRecords.java @@ -36,10 +36,13 @@ import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestApplicationClientProtocolRecords { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +class TestApplicationClientProtocolRecords { /* * This test validates the scenario in which the client sets a null value for a @@ -47,7 +50,7 @@ * */ @Test - public void testCLCPBImplNullEnv() throws IOException { + void testCLCPBImplNullEnv() throws IOException { Map localResources = Collections.emptyMap(); Map environment = new HashMap(); List commands = Collections.emptyList(); @@ -68,7 +71,7 @@ public void testCLCPBImplNullEnv() throws IOException { ContainerLaunchContext clcProto = new ContainerLaunchContextPBImpl( ((ContainerLaunchContextPBImpl) clc).getProto()); - Assert.assertEquals("", + assertEquals("", clcProto.getEnvironment().get("testCLCPBImplNullEnv")); } @@ -78,7 +81,7 @@ public void testCLCPBImplNullEnv() throws IOException { * local resource URL. 
*/ @Test - public void testCLCPBImplNullResourceURL() throws IOException { + void testCLCPBImplNullResourceURL() throws IOException { RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); try { LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -92,9 +95,9 @@ public void testCLCPBImplNullResourceURL() throws IOException { localResources.put("null_url_resource", rsrc_alpha); ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); containerLaunchContext.setLocalResources(localResources); - Assert.fail("Setting an invalid local resource should be an error!"); + fail("Setting an invalid local resource should be an error!"); } catch (NullPointerException e) { - Assert.assertTrue(e.getMessage().contains("Null resource URL for local resource")); + assertTrue(e.getMessage().contains("Null resource URL for local resource")); } } @@ -103,7 +106,7 @@ public void testCLCPBImplNullResourceURL() throws IOException { * local resource type. */ @Test - public void testCLCPBImplNullResourceType() throws IOException { + void testCLCPBImplNullResourceType() throws IOException { RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); try { LocalResource resource = recordFactory.newRecordInstance(LocalResource.class); @@ -117,9 +120,9 @@ public void testCLCPBImplNullResourceType() throws IOException { localResources.put("null_type_resource", resource); ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); containerLaunchContext.setLocalResources(localResources); - Assert.fail("Setting an invalid local resource should be an error!"); + fail("Setting an invalid local resource should be an error!"); } catch (NullPointerException e) { - Assert.assertTrue(e.getMessage().contains("Null resource type for local resource")); + assertTrue(e.getMessage().contains("Null resource type for local resource")); } } @@ -128,7 +131,7 @@ public void testCLCPBImplNullResourceType() throws IOException { * local resource type. 
*/ @Test - public void testCLCPBImplNullResourceVisibility() throws IOException { + void testCLCPBImplNullResourceVisibility() throws IOException { RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); try { LocalResource resource = recordFactory.newRecordInstance(LocalResource.class); @@ -142,9 +145,9 @@ public void testCLCPBImplNullResourceVisibility() throws IOException { localResources.put("null_visibility_resource", resource); ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); containerLaunchContext.setLocalResources(localResources); - Assert.fail("Setting an invalid local resource should be an error!"); + fail("Setting an invalid local resource should be an error!"); } catch (NullPointerException e) { - Assert.assertTrue(e.getMessage().contains("Null resource visibility for local resource")); + assertTrue(e.getMessage().contains("Null resource visibility for local resource")); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestSerializedExceptionPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestSerializedExceptionPBImpl.java index ecfa63e0329..36564660aef 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestSerializedExceptionPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/impl/pb/TestSerializedExceptionPBImpl.java @@ -22,69 +22,71 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestSerializedExceptionPBImpl { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; + +class TestSerializedExceptionPBImpl { @Test - public void testSerializedException() throws Exception { + void testSerializedException() throws Exception { SerializedExceptionPBImpl orig = new SerializedExceptionPBImpl(); orig.init(new Exception("test exception")); SerializedExceptionProto proto = orig.getProto(); SerializedExceptionPBImpl deser = new SerializedExceptionPBImpl(proto); - Assert.assertEquals(orig, deser); - Assert.assertEquals(orig.getMessage(), deser.getMessage()); - Assert.assertEquals(orig.getRemoteTrace(), deser.getRemoteTrace()); - Assert.assertEquals(orig.getCause(), deser.getCause()); + assertEquals(orig, deser); + assertEquals(orig.getMessage(), deser.getMessage()); + assertEquals(orig.getRemoteTrace(), deser.getRemoteTrace()); + assertEquals(orig.getCause(), deser.getCause()); } @Test - public void testDeserialize() throws Exception { + void testDeserialize() throws Exception { Exception ex = new Exception("test exception"); SerializedExceptionPBImpl pb = new SerializedExceptionPBImpl(); try { pb.deSerialize(); - Assert.fail("deSerialze should throw YarnRuntimeException"); + fail("deSerialize should throw YarnRuntimeException"); } catch (YarnRuntimeException e) { - Assert.assertEquals(ClassNotFoundException.class, - e.getCause().getClass()); + assertEquals(ClassNotFoundException.class, e.getCause().getClass()); } pb.init(ex); - Assert.assertEquals(ex.toString(), pb.deSerialize().toString()); + assertEquals(ex.toString(),
pb.deSerialize().toString()); } @Test - public void testDeserializeWithDefaultConstructor() { + void testDeserializeWithDefaultConstructor() { // Init SerializedException with an Exception with default constructor. ClosedChannelException ex = new ClosedChannelException(); SerializedExceptionPBImpl pb = new SerializedExceptionPBImpl(); pb.init(ex); - Assert.assertEquals(ex.getClass(), pb.deSerialize().getClass()); + assertEquals(ex.getClass(), pb.deSerialize().getClass()); } @Test - public void testBeforeInit() throws Exception { + void testBeforeInit() throws Exception { SerializedExceptionProto defaultProto = SerializedExceptionProto.newBuilder().build(); SerializedExceptionPBImpl pb1 = new SerializedExceptionPBImpl(); - Assert.assertNull(pb1.getCause()); + assertNull(pb1.getCause()); SerializedExceptionPBImpl pb2 = new SerializedExceptionPBImpl(); - Assert.assertEquals(defaultProto, pb2.getProto()); + assertEquals(defaultProto, pb2.getProto()); SerializedExceptionPBImpl pb3 = new SerializedExceptionPBImpl(); - Assert.assertEquals(defaultProto.getTrace(), pb3.getRemoteTrace()); + assertEquals(defaultProto.getTrace(), pb3.getRemoteTrace()); } @Test - public void testThrowableDeserialization() { + void testThrowableDeserialization() { // java.lang.Error should also be serializable Error ex = new Error(); SerializedExceptionPBImpl pb = new SerializedExceptionPBImpl(); pb.init(ex); - Assert.assertEquals(ex.getClass(), pb.deSerialize().getClass()); + assertEquals(ex.getClass(), pb.deSerialize().getClass()); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java index 9d16edb7163..473d5abb393 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timeline/TestTimelineRecords.java @@ -31,16 +31,19 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestTimelineRecords { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class TestTimelineRecords { private static final Log LOG = LogFactory.getLog(TestTimelineRecords.class); @Test - public void testEntities() throws Exception { + void testEntities() throws Exception { TimelineEntities entities = new TimelineEntities(); for (int j = 0; j < 2; ++j) { TimelineEntity entity = new TimelineEntity(); @@ -67,27 +70,27 @@ public void testEntities() throws Exception { LOG.info("Entities in JSON:"); LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true)); - Assert.assertEquals(2, entities.getEntities().size()); + assertEquals(2, entities.getEntities().size()); TimelineEntity entity1 = entities.getEntities().get(0); - Assert.assertEquals("entity id 0", entity1.getEntityId()); - Assert.assertEquals("entity type 0", entity1.getEntityType()); - Assert.assertEquals(2, entity1.getRelatedEntities().size()); - Assert.assertEquals(2, entity1.getEvents().size()); 
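The mechanical pattern running through these hunks is the position of the optional failure message: org.junit.Assert accepts it as the first argument, while org.junit.jupiter.api.Assertions accepts it as the last. A minimal, hypothetical Java sketch of the two shapes (the message strings are invented; the values mirror the surrounding TestTimelineRecords code):

// JUnit 4: Assert.assertEquals(String message, long expected, long actual)
// Assert.assertEquals("unexpected entity count", 2, entities.getEntities().size());
// JUnit 5: Assertions.assertEquals(expected, actual, String message), usually static-imported
assertEquals(2, entities.getEntities().size(), "unexpected entity count");
assertTrue(entity1.getEvents().size() == 2, "unexpected event count");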
- Assert.assertEquals(2, entity1.getPrimaryFilters().size()); - Assert.assertEquals(2, entity1.getOtherInfo().size()); - Assert.assertEquals("domain id 0", entity1.getDomainId()); + assertEquals("entity id 0", entity1.getEntityId()); + assertEquals("entity type 0", entity1.getEntityType()); + assertEquals(2, entity1.getRelatedEntities().size()); + assertEquals(2, entity1.getEvents().size()); + assertEquals(2, entity1.getPrimaryFilters().size()); + assertEquals(2, entity1.getOtherInfo().size()); + assertEquals("domain id 0", entity1.getDomainId()); TimelineEntity entity2 = entities.getEntities().get(1); - Assert.assertEquals("entity id 1", entity2.getEntityId()); - Assert.assertEquals("entity type 1", entity2.getEntityType()); - Assert.assertEquals(2, entity2.getRelatedEntities().size()); - Assert.assertEquals(2, entity2.getEvents().size()); - Assert.assertEquals(2, entity2.getPrimaryFilters().size()); - Assert.assertEquals(2, entity2.getOtherInfo().size()); - Assert.assertEquals("domain id 1", entity2.getDomainId()); + assertEquals("entity id 1", entity2.getEntityId()); + assertEquals("entity type 1", entity2.getEntityType()); + assertEquals(2, entity2.getRelatedEntities().size()); + assertEquals(2, entity2.getEvents().size()); + assertEquals(2, entity2.getPrimaryFilters().size()); + assertEquals(2, entity2.getOtherInfo().size()); + assertEquals("domain id 1", entity2.getDomainId()); } @Test - public void testEvents() throws Exception { + void testEvents() throws Exception { TimelineEvents events = new TimelineEvents(); for (int j = 0; j < 2; ++j) { TimelineEvents.EventsOfOneEntity partEvents = @@ -107,31 +110,31 @@ public void testEvents() throws Exception { LOG.info("Events in JSON:"); LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(events, true)); - Assert.assertEquals(2, events.getAllEvents().size()); + assertEquals(2, events.getAllEvents().size()); TimelineEvents.EventsOfOneEntity partEvents1 = events.getAllEvents().get(0); - Assert.assertEquals("entity id 0", partEvents1.getEntityId()); - Assert.assertEquals("entity type 0", partEvents1.getEntityType()); - Assert.assertEquals(2, partEvents1.getEvents().size()); + assertEquals("entity id 0", partEvents1.getEntityId()); + assertEquals("entity type 0", partEvents1.getEntityType()); + assertEquals(2, partEvents1.getEvents().size()); TimelineEvent event11 = partEvents1.getEvents().get(0); - Assert.assertEquals("event type 0", event11.getEventType()); - Assert.assertEquals(2, event11.getEventInfo().size()); + assertEquals("event type 0", event11.getEventType()); + assertEquals(2, event11.getEventInfo().size()); TimelineEvent event12 = partEvents1.getEvents().get(1); - Assert.assertEquals("event type 1", event12.getEventType()); - Assert.assertEquals(2, event12.getEventInfo().size()); + assertEquals("event type 1", event12.getEventType()); + assertEquals(2, event12.getEventInfo().size()); TimelineEvents.EventsOfOneEntity partEvents2 = events.getAllEvents().get(1); - Assert.assertEquals("entity id 1", partEvents2.getEntityId()); - Assert.assertEquals("entity type 1", partEvents2.getEntityType()); - Assert.assertEquals(2, partEvents2.getEvents().size()); + assertEquals("entity id 1", partEvents2.getEntityId()); + assertEquals("entity type 1", partEvents2.getEntityType()); + assertEquals(2, partEvents2.getEvents().size()); TimelineEvent event21 = partEvents2.getEvents().get(0); - Assert.assertEquals("event type 0", event21.getEventType()); - Assert.assertEquals(2, event21.getEventInfo().size()); + assertEquals("event type 0", 
event21.getEventType()); + assertEquals(2, event21.getEventInfo().size()); TimelineEvent event22 = partEvents2.getEvents().get(1); - Assert.assertEquals("event type 1", event22.getEventType()); - Assert.assertEquals(2, event22.getEventInfo().size()); + assertEquals("event type 1", event22.getEventType()); + assertEquals(2, event22.getEventInfo().size()); } @Test - public void testTimelinePutErrors() throws Exception { + void testTimelinePutErrors() throws Exception { TimelinePutResponse TimelinePutErrors = new TimelinePutResponse(); TimelinePutError error1 = new TimelinePutError(); error1.setEntityId("entity id 1"); @@ -149,23 +152,23 @@ public void testTimelinePutErrors() throws Exception { LOG.info("Errors in JSON:"); LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(TimelinePutErrors, true)); - Assert.assertEquals(3, TimelinePutErrors.getErrors().size()); + assertEquals(3, TimelinePutErrors.getErrors().size()); TimelinePutError e = TimelinePutErrors.getErrors().get(0); - Assert.assertEquals(error1.getEntityId(), e.getEntityId()); - Assert.assertEquals(error1.getEntityType(), e.getEntityType()); - Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); + assertEquals(error1.getEntityId(), e.getEntityId()); + assertEquals(error1.getEntityType(), e.getEntityType()); + assertEquals(error1.getErrorCode(), e.getErrorCode()); e = TimelinePutErrors.getErrors().get(1); - Assert.assertEquals(error1.getEntityId(), e.getEntityId()); - Assert.assertEquals(error1.getEntityType(), e.getEntityType()); - Assert.assertEquals(error1.getErrorCode(), e.getErrorCode()); + assertEquals(error1.getEntityId(), e.getEntityId()); + assertEquals(error1.getEntityType(), e.getEntityType()); + assertEquals(error1.getErrorCode(), e.getErrorCode()); e = TimelinePutErrors.getErrors().get(2); - Assert.assertEquals(error2.getEntityId(), e.getEntityId()); - Assert.assertEquals(error2.getEntityType(), e.getEntityType()); - Assert.assertEquals(error2.getErrorCode(), e.getErrorCode()); + assertEquals(error2.getEntityId(), e.getEntityId()); + assertEquals(error2.getEntityType(), e.getEntityType()); + assertEquals(error2.getErrorCode(), e.getErrorCode()); } @Test - public void testTimelineDomain() throws Exception { + void testTimelineDomain() throws Exception { TimelineDomains domains = new TimelineDomains(); TimelineDomain domain = null; @@ -185,25 +188,25 @@ public void testTimelineDomain() throws Exception { LOG.info("Domain in JSON:"); LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(domains, true)); - Assert.assertEquals(2, domains.getDomains().size()); + assertEquals(2, domains.getDomains().size()); for (int i = 0; i < domains.getDomains().size(); ++i) { domain = domains.getDomains().get(i); - Assert.assertEquals("test id " + (i + 1), domain.getId()); - Assert.assertEquals("test description " + (i + 1), + assertEquals("test id " + (i + 1), domain.getId()); + assertEquals("test description " + (i + 1), domain.getDescription()); - Assert.assertEquals("test owner " + (i + 1), domain.getOwner()); - Assert.assertEquals("test_reader_user_" + (i + 1) + + assertEquals("test owner " + (i + 1), domain.getOwner()); + assertEquals("test_reader_user_" + (i + 1) + " test_reader_group+" + (i + 1), domain.getReaders()); - Assert.assertEquals("test_writer_user_" + (i + 1) + + assertEquals("test_writer_user_" + (i + 1) + " test_writer_group+" + (i + 1), domain.getWriters()); - Assert.assertEquals(new Long(0L), domain.getCreatedTime()); - Assert.assertEquals(new Long(1L), domain.getModifiedTime()); + assertEquals(new Long(0L), 
domain.getCreatedTime()); + assertEquals(new Long(1L), domain.getModifiedTime()); } } @Test - public void testMapInterfaceOrTimelineRecords() throws Exception { + void testMapInterfaceOrTimelineRecords() throws Exception { TimelineEntity entity = new TimelineEntity(); List>> primaryFiltersList = new ArrayList>>(); @@ -284,36 +287,36 @@ public void testMapInterfaceOrTimelineRecords() throws Exception { } private static void assertPrimaryFilters(TimelineEntity entity) { - Assert.assertNotNull(entity.getPrimaryFilters()); - Assert.assertNotNull(entity.getPrimaryFiltersJAXB()); - Assert.assertTrue(entity.getPrimaryFilters() instanceof HashMap); - Assert.assertTrue(entity.getPrimaryFiltersJAXB() instanceof HashMap); - Assert.assertEquals( + assertNotNull(entity.getPrimaryFilters()); + assertNotNull(entity.getPrimaryFiltersJAXB()); + assertTrue(entity.getPrimaryFilters() instanceof HashMap); + assertTrue(entity.getPrimaryFiltersJAXB() instanceof HashMap); + assertEquals( entity.getPrimaryFilters(), entity.getPrimaryFiltersJAXB()); } private static void assertRelatedEntities(TimelineEntity entity) { - Assert.assertNotNull(entity.getRelatedEntities()); - Assert.assertNotNull(entity.getRelatedEntitiesJAXB()); - Assert.assertTrue(entity.getRelatedEntities() instanceof HashMap); - Assert.assertTrue(entity.getRelatedEntitiesJAXB() instanceof HashMap); - Assert.assertEquals( + assertNotNull(entity.getRelatedEntities()); + assertNotNull(entity.getRelatedEntitiesJAXB()); + assertTrue(entity.getRelatedEntities() instanceof HashMap); + assertTrue(entity.getRelatedEntitiesJAXB() instanceof HashMap); + assertEquals( entity.getRelatedEntities(), entity.getRelatedEntitiesJAXB()); } private static void assertOtherInfo(TimelineEntity entity) { - Assert.assertNotNull(entity.getOtherInfo()); - Assert.assertNotNull(entity.getOtherInfoJAXB()); - Assert.assertTrue(entity.getOtherInfo() instanceof HashMap); - Assert.assertTrue(entity.getOtherInfoJAXB() instanceof HashMap); - Assert.assertEquals(entity.getOtherInfo(), entity.getOtherInfoJAXB()); + assertNotNull(entity.getOtherInfo()); + assertNotNull(entity.getOtherInfoJAXB()); + assertTrue(entity.getOtherInfo() instanceof HashMap); + assertTrue(entity.getOtherInfoJAXB() instanceof HashMap); + assertEquals(entity.getOtherInfo(), entity.getOtherInfoJAXB()); } private static void assertEventInfo(TimelineEvent event) { - Assert.assertNotNull(event); - Assert.assertNotNull(event.getEventInfoJAXB()); - Assert.assertTrue(event.getEventInfo() instanceof HashMap); - Assert.assertTrue(event.getEventInfoJAXB() instanceof HashMap); - Assert.assertEquals(event.getEventInfo(), event.getEventInfoJAXB()); + assertNotNull(event); + assertNotNull(event.getEventInfoJAXB()); + assertTrue(event.getEventInfo() instanceof HashMap); + assertTrue(event.getEventInfoJAXB() instanceof HashMap); + assertEquals(event.getEventInfo(), event.getEventInfoJAXB()); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java index 221969bdd65..03aeb168b32 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java @@ -25,8 +25,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; -import org.junit.Test; -import org.junit.Assert; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; @@ -34,13 +33,20 @@ import java.util.Iterator; import java.util.Map; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -public class TestTimelineServiceRecords { + +class TestTimelineServiceRecords { private static final Log LOG = LogFactory.getLog(TestTimelineServiceRecords.class); @Test - public void testTimelineEntities() throws Exception { + void testTimelineEntities() throws Exception { TimelineEntity entity = new TimelineEntity(); entity.setType("test type 1"); entity.setId("test id 1"); @@ -48,8 +54,7 @@ public void testTimelineEntities() throws Exception { entity.addInfo("test info key 2", Arrays.asList("test info value 2", "test info value 3")); entity.addInfo("test info key 3", true); - Assert.assertTrue( - entity.getInfo().get("test info key 3") instanceof Boolean); + assertTrue(entity.getInfo().get("test info key 3") instanceof Boolean); entity.addConfig("test config key 1", "test config value 1"); entity.addConfig("test config key 2", "test config value 2"); @@ -59,43 +64,43 @@ public void testTimelineEntities() throws Exception { metric1.addValue(1L, 1.0F); metric1.addValue(3L, 3.0D); metric1.addValue(2L, 2); - Assert.assertEquals(TimelineMetric.Type.TIME_SERIES, metric1.getType()); + assertEquals(TimelineMetric.Type.TIME_SERIES, metric1.getType()); Iterator> itr = metric1.getValues().entrySet().iterator(); Map.Entry entry = itr.next(); - Assert.assertEquals(new Long(3L), entry.getKey()); - Assert.assertEquals(3.0D, entry.getValue()); + assertEquals(new Long(3L), entry.getKey()); + assertEquals(3.0D, entry.getValue()); entry = itr.next(); - Assert.assertEquals(new Long(2L), entry.getKey()); - Assert.assertEquals(2, entry.getValue()); + assertEquals(new Long(2L), entry.getKey()); + assertEquals(2, entry.getValue()); entry = itr.next(); - Assert.assertEquals(new Long(1L), entry.getKey()); - Assert.assertEquals(1.0F, entry.getValue()); - Assert.assertFalse(itr.hasNext()); + assertEquals(new Long(1L), entry.getKey()); + assertEquals(1.0F, entry.getValue()); + assertFalse(itr.hasNext()); entity.addMetric(metric1); TimelineMetric metric2 = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE); metric2.setId("test metric id 1"); metric2.addValue(3L, (short) 3); - Assert.assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric2.getType()); - Assert.assertTrue( + assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric2.getType()); + assertTrue( metric2.getValues().values().iterator().next() instanceof Short); Map points = new HashMap<>(); points.put(4L, 4.0D); points.put(5L, 5.0D); try { metric2.setValues(points); - Assert.fail(); + fail("Should throw IllegalArgumentException."); } catch (IllegalArgumentException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( 
"Values cannot contain more than one point in")); } try { metric2.addValues(points); - Assert.fail(); + fail("Should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( "Values cannot contain more than one point in")); } entity.addMetric(metric2); @@ -104,9 +109,8 @@ public void testTimelineEntities() throws Exception { new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE); metric3.setId("test metric id 1"); metric3.addValue(4L, (short) 4); - Assert.assertEquals("metric3 should equal to metric2! ", metric3, metric2); - Assert.assertNotEquals("metric1 should not equal to metric2! ", - metric1, metric2); + assertEquals(metric3, metric2, "metric3 should equal to metric2!"); + assertNotEquals(metric1, metric2, "metric1 should not equal to metric2!"); TimelineEvent event1 = new TimelineEvent(); event1.setId("test event id 1"); @@ -114,8 +118,7 @@ public void testTimelineEntities() throws Exception { event1.addInfo("test info key 2", Arrays.asList("test info value 2", "test info value 3")); event1.addInfo("test info key 3", true); - Assert.assertTrue( - event1.getInfo().get("test info key 3") instanceof Boolean); + assertTrue(event1.getInfo().get("test info key 3") instanceof Boolean); event1.setTimestamp(1L); entity.addEvent(event1); @@ -125,19 +128,17 @@ public void testTimelineEntities() throws Exception { event2.addInfo("test info key 2", Arrays.asList("test info value 2", "test info value 3")); event2.addInfo("test info key 3", true); - Assert.assertTrue( + assertTrue( event2.getInfo().get("test info key 3") instanceof Boolean); event2.setTimestamp(2L); entity.addEvent(event2); - Assert.assertFalse("event1 should not equal to event2! ", - event1.equals(event2)); + assertFalse(event1.equals(event2), "event1 should not equal to event2!"); TimelineEvent event3 = new TimelineEvent(); event3.setId("test event id 1"); event3.setTimestamp(1L); - Assert.assertEquals("event1 should equal to event3! ", event3, event1); - Assert.assertNotEquals("event1 should not equal to event2! ", - event1, event2); + assertEquals(event3, event1, "event1 should equal to event3!"); + assertNotEquals(event1, event2, "event1 should not equal to event2!"); entity.setCreatedTime(0L); entity.addRelatesToEntity("test type 2", "test id 2"); @@ -153,25 +154,23 @@ public void testTimelineEntities() throws Exception { entities.addEntity(entity2); LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true)); - Assert.assertFalse("entity 1 should not be valid without type and id", - entity1.isValid()); + assertFalse(entity1.isValid(), + "entity 1 should not be valid without type and id"); entity1.setId("test id 2"); entity1.setType("test type 2"); entity2.setId("test id 1"); entity2.setType("test type 1"); - Assert.assertEquals("Timeline entity should equal to entity2! ", - entity, entity2); - Assert.assertNotEquals("entity1 should not equal to entity! ", - entity1, entity); - Assert.assertEquals("entity should be less than entity1! ", - entity1.compareTo(entity), 1); - Assert.assertEquals("entity's hash code should be -28727840 but not " - + entity.hashCode(), entity.hashCode(), -28727840); + assertEquals(entity, entity2, "Timeline entity should equal to entity2! "); + assertNotEquals(entity1, entity, "entity1 should not equal to entity! "); + assertEquals(entity1.compareTo(entity), 1, + "entity should be less than entity1! 
"); + assertEquals(-28727840, entity.hashCode(), + "entity's hash code should be -28727840 but not " + entity.hashCode()); } @Test - public void testFirstClassCitizenEntities() throws Exception { + void testFirstClassCitizenEntities() throws Exception { UserEntity user = new UserEntity(); user.setId("test user id"); @@ -245,49 +244,49 @@ public void testFirstClassCitizenEntities() throws Exception { // Check parent/children APIs - Assert.assertNotNull(app1.getParent()); - Assert.assertEquals(flow2.getType(), app1.getParent().getType()); - Assert.assertEquals(flow2.getId(), app1.getParent().getId()); + assertNotNull(app1.getParent()); + assertEquals(flow2.getType(), app1.getParent().getType()); + assertEquals(flow2.getId(), app1.getParent().getId()); app1.addInfo(ApplicationEntity.PARENT_INFO_KEY, "invalid parent object"); try { app1.getParent(); - Assert.fail(); + fail("Should throw Exception"); } catch (Exception e) { - Assert.assertTrue(e instanceof YarnRuntimeException); - Assert.assertTrue(e.getMessage().contains( + assertTrue(e instanceof YarnRuntimeException); + assertTrue(e.getMessage().contains( "Parent info is invalid identifier object")); } - Assert.assertNotNull(app1.getChildren()); - Assert.assertEquals(1, app1.getChildren().size()); - Assert.assertEquals( + assertNotNull(app1.getChildren()); + assertEquals(1, app1.getChildren().size()); + assertEquals( appAttempt.getType(), app1.getChildren().iterator().next().getType()); - Assert.assertEquals( + assertEquals( appAttempt.getId(), app1.getChildren().iterator().next().getId()); app1.addInfo(ApplicationEntity.CHILDREN_INFO_KEY, Collections.singletonList("invalid children set")); try { app1.getChildren(); - Assert.fail(); + fail("Should throw Exception"); } catch (Exception e) { - Assert.assertTrue(e instanceof YarnRuntimeException); - Assert.assertTrue(e.getMessage().contains( + assertTrue(e instanceof YarnRuntimeException); + assertTrue(e.getMessage().contains( "Children info is invalid identifier set")); } app1.addInfo(ApplicationEntity.CHILDREN_INFO_KEY, Collections.singleton("invalid child object")); try { app1.getChildren(); - Assert.fail(); + fail("Should throw Exception"); } catch (Exception e) { - Assert.assertTrue(e instanceof YarnRuntimeException); - Assert.assertTrue(e.getMessage().contains( + assertTrue(e instanceof YarnRuntimeException); + assertTrue(e.getMessage().contains( "Children info contains invalid identifier object")); } } @Test - public void testUser() throws Exception { + void testUser() throws Exception { UserEntity user = new UserEntity(); user.setId("test user id"); user.addInfo("test info key 1", "test info value 1"); @@ -296,7 +295,7 @@ public void testUser() throws Exception { } @Test - public void testQueue() throws Exception { + void testQueue() throws Exception { QueueEntity queue = new QueueEntity(); queue.setId("test queue id"); queue.addInfo("test info key 1", "test info value 1"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/TestClientRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/TestClientRMProxy.java index 6c31fea7d56..8c859b9f632 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/TestClientRMProxy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/TestClientRMProxy.java @@ -29,20 +29,20 @@ import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; import 
org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.util.Records; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.InetSocketAddress; import java.security.PrivilegedExceptionAction; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -public class TestClientRMProxy { +class TestClientRMProxy { @Test - public void testGetRMDelegationTokenService() { + void testGetRMDelegationTokenService() { String defaultRMAddress = YarnConfiguration.DEFAULT_RM_ADDRESS; YarnConfiguration conf = new YarnConfiguration(); @@ -51,8 +51,8 @@ public void testGetRMDelegationTokenService() { String[] services = tokenService.toString().split(","); assertEquals(1, services.length); for (String service : services) { - assertTrue("Incorrect token service name", - service.contains(defaultRMAddress)); + assertTrue(service.contains(defaultRMAddress), + "Incorrect token service name"); } // HA is enabled @@ -66,13 +66,13 @@ public void testGetRMDelegationTokenService() { services = tokenService.toString().split(","); assertEquals(2, services.length); for (String service : services) { - assertTrue("Incorrect token service name", - service.contains(defaultRMAddress)); + assertTrue(service.contains(defaultRMAddress), + "Incorrect token service name"); } } @Test - public void testGetAMRMTokenService() { + void testGetAMRMTokenService() { String defaultRMAddress = YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS; YarnConfiguration conf = new YarnConfiguration(); @@ -81,8 +81,8 @@ public void testGetAMRMTokenService() { String[] services = tokenService.toString().split(","); assertEquals(1, services.length); for (String service : services) { - assertTrue("Incorrect token service name", - service.contains(defaultRMAddress)); + assertTrue(service.contains(defaultRMAddress), + "Incorrect token service name"); } // HA is enabled @@ -96,8 +96,8 @@ public void testGetAMRMTokenService() { services = tokenService.toString().split(","); assertEquals(2, services.length); for (String service : services) { - assertTrue("Incorrect token service name", - service.contains(defaultRMAddress)); + assertTrue(service.contains(defaultRMAddress), + "Incorrect token service name"); } } @@ -109,7 +109,7 @@ public void testGetAMRMTokenService() { * @throws Exception an Exception occurred */ @Test - public void testProxyUserCorrectUGI() throws Exception { + void testProxyUserCorrectUGI() throws Exception { final YarnConfiguration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true); conf.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2"); @@ -163,7 +163,7 @@ private void assertUGI() throws IOException { UGICapturingHadoopYarnProtoRPC.lastCurrentUser; assertNotNull(lastCurrentUser); assertEquals("proxy", lastCurrentUser.getShortUserName()); - Assert.assertEquals(UserGroupInformation.AuthenticationMethod.PROXY, + assertEquals(UserGroupInformation.AuthenticationMethod.PROXY, lastCurrentUser.getAuthenticationMethod()); assertEquals(UserGroupInformation.getCurrentUser(), lastCurrentUser.getRealUser()); @@ -187,7 +187,7 @@ public Object getProxy(Class protocol, InetSocketAddress addr, try { currentUser = 
UserGroupInformation.getCurrentUser(); } catch (IOException ioe) { - Assert.fail("Unable to get current user\n" + fail("Unable to get current user\n" + StringUtils.stringifyException(ioe)); } lastCurrentUser = currentUser; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java index e3fffef6143..8292c1f142c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java @@ -18,6 +18,10 @@ package org.apache.hadoop.yarn.client.api.impl; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; @@ -49,143 +53,142 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientHandlerException; import com.sun.jersey.api.client.ClientResponse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestTimelineClient { private TimelineClientImpl client; private TimelineWriter spyTimelineWriter; - @Before - public void setup() { + @BeforeEach + void setup() { YarnConfiguration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 1.0f); client = createTimelineClient(conf); } - @After - public void tearDown() { + @AfterEach + void tearDown() { if (client != null) { client.stop(); } } @Test - public void testPostEntities() throws Exception { + void testPostEntities() throws Exception { mockEntityClientResponse(spyTimelineWriter, ClientResponse.Status.OK, false, false); try { TimelinePutResponse response = client.putEntities(generateEntity()); - Assert.assertEquals(0, response.getErrors().size()); + assertEquals(0, response.getErrors().size()); } catch (YarnException e) { - Assert.fail("Exception is not expected"); + fail("Exception is not expected"); } } @Test - public void testPostEntitiesWithError() throws Exception { + void testPostEntitiesWithError() throws Exception { mockEntityClientResponse(spyTimelineWriter, ClientResponse.Status.OK, true, false); try { TimelinePutResponse response = client.putEntities(generateEntity()); - Assert.assertEquals(1, response.getErrors().size()); - Assert.assertEquals("test entity id", response.getErrors().get(0) + assertEquals(1, response.getErrors().size()); + assertEquals("test entity id", response.getErrors().get(0) .getEntityId()); - Assert.assertEquals("test entity type", response.getErrors().get(0) + assertEquals("test entity type", response.getErrors().get(0) .getEntityType()); - Assert.assertEquals(TimelinePutResponse.TimelinePutError.IO_EXCEPTION, + 
assertEquals(TimelinePutResponse.TimelinePutError.IO_EXCEPTION, response.getErrors().get(0).getErrorCode()); } catch (YarnException e) { - Assert.fail("Exception is not expected"); + fail("Exception is not expected"); } } @Test - public void testPostIncompleteEntities() throws Exception { + void testPostIncompleteEntities() throws Exception { try { client.putEntities(new TimelineEntity()); - Assert.fail("Exception should have been thrown"); + fail("Exception should have been thrown"); } catch (YarnException e) { } } @Test - public void testPostEntitiesNoResponse() throws Exception { + void testPostEntitiesNoResponse() throws Exception { mockEntityClientResponse(spyTimelineWriter, ClientResponse.Status.INTERNAL_SERVER_ERROR, false, false); try { client.putEntities(generateEntity()); - Assert.fail("Exception is expected"); + fail("Exception is expected"); } catch (YarnException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( "Failed to get the response from the timeline server.")); } } @Test - public void testPostEntitiesConnectionRefused() throws Exception { + void testPostEntitiesConnectionRefused() throws Exception { mockEntityClientResponse(spyTimelineWriter, null, false, true); try { client.putEntities(generateEntity()); - Assert.fail("RuntimeException is expected"); + fail("RuntimeException is expected"); } catch (RuntimeException re) { - Assert.assertTrue(re instanceof ClientHandlerException); + assertTrue(re instanceof ClientHandlerException); } } @Test - public void testPutDomain() throws Exception { + void testPutDomain() throws Exception { mockDomainClientResponse(spyTimelineWriter, ClientResponse.Status.OK, false); try { client.putDomain(generateDomain()); } catch (YarnException e) { - Assert.fail("Exception is not expected"); + fail("Exception is not expected"); } } @Test - public void testPutDomainNoResponse() throws Exception { + void testPutDomainNoResponse() throws Exception { mockDomainClientResponse(spyTimelineWriter, ClientResponse.Status.FORBIDDEN, false); try { client.putDomain(generateDomain()); - Assert.fail("Exception is expected"); + fail("Exception is expected"); } catch (YarnException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( "Failed to get the response from the timeline server.")); } } @Test - public void testPutDomainConnectionRefused() throws Exception { + void testPutDomainConnectionRefused() throws Exception { mockDomainClientResponse(spyTimelineWriter, null, true); try { client.putDomain(generateDomain()); - Assert.fail("RuntimeException is expected"); + fail("RuntimeException is expected"); } catch (RuntimeException re) { - Assert.assertTrue(re instanceof ClientHandlerException); + assertTrue(re instanceof ClientHandlerException); } } @Test - public void testCheckRetryCount() throws Exception { + void testCheckRetryCount() throws Exception { try { YarnConfiguration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, -2); createTimelineClient(conf); - Assert.fail(); + fail("Should throw Exception"); } catch(IllegalArgumentException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES)); } @@ -195,9 +198,9 @@ public void testCheckRetryCount() throws Exception { conf.setLong(YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS, 0); createTimelineClient(conf); - 
Assert.fail(); + fail("Should throw Exception"); } catch(IllegalArgumentException e) { - Assert.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( YarnConfiguration.TIMELINE_SERVICE_CLIENT_RETRY_INTERVAL_MS)); } int newMaxRetries = 5; @@ -212,20 +215,19 @@ public void testCheckRetryCount() throws Exception { try { // This call should fail because there is no timeline server client.putEntities(generateEntity()); - Assert.fail("Exception expected! " + fail("Exception expected! " + "Timeline server should be off to run this test. "); } catch (RuntimeException ce) { - Assert.assertTrue( - "Handler exception for reason other than retry: " + ce.getMessage(), - ce.getMessage().contains("Connection retries limit exceeded")); + assertTrue(ce.getMessage().contains("Connection retries limit exceeded"), + "Handler exception for reason other than retry: " + ce.getMessage()); // we would expect this exception here, check if the client has retried - Assert.assertTrue("Retry filter didn't perform any retries! ", - client.connector.connectionRetry.getRetired()); + assertTrue(client.connector.connectionRetry.getRetired(), + "Retry filter didn't perform any retries! "); } } @Test - public void testDelegationTokenOperationsRetry() throws Exception { + void testDelegationTokenOperationsRetry() throws Exception { int newMaxRetries = 5; long newIntervalMs = 500; YarnConfiguration conf = new YarnConfiguration(); @@ -309,17 +311,16 @@ public void testDelegationTokenOperationsRetry() throws Exception { } private static void assertFail() { - Assert.fail("Exception expected! " + fail("Exception expected! " + "Timeline server should be off to run this test."); } private void assertException(TimelineClientImpl client, RuntimeException ce) { - Assert.assertTrue( - "Handler exception for reason other than retry: " + ce.toString(), ce - .getMessage().contains("Connection retries limit exceeded")); + assertTrue(ce.getMessage().contains("Connection retries limit exceeded"), + "Handler exception for reason other than retry: " + ce.toString()); // we would expect this exception here, check if the client has retried - Assert.assertTrue("Retry filter didn't perform any retries! ", - client.connector.connectionRetry.getRetired()); + assertTrue(client.connector.connectionRetry.getRetired(), + "Retry filter didn't perform any retries! 
"); } public static ClientResponse mockEntityClientResponse( @@ -448,7 +449,7 @@ protected TimelineConnector createTimelineConnector() { } @Test - public void testTimelineClientCleanup() throws Exception { + void testTimelineClientCleanup() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 0); @@ -477,7 +478,7 @@ public void testTimelineClientCleanup() throws Exception { reloaderThread = thread; } } - Assert.assertTrue("Reloader is not alive", reloaderThread.isAlive()); + assertTrue(reloaderThread.isAlive(), "Reloader is not alive"); client.close(); @@ -489,7 +490,7 @@ public void testTimelineClientCleanup() throws Exception { } Thread.sleep(1000); } - Assert.assertFalse("Reloader is still alive", reloaderStillAlive); + assertFalse(reloaderStillAlive, "Reloader is still alive"); } private static class TestTimelineDelegationTokenSecretManager extends diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java index d3826e1a6fe..469283c1a8c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientForATS1_5.java @@ -18,6 +18,8 @@ package org.apache.hadoop.yarn.client.api.impl; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -42,13 +44,12 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestTimelineClientForATS1_5 { @@ -60,8 +61,8 @@ private static File localActiveDir; private TimelineWriter spyTimelineWriter; - @Before - public void setup() throws Exception { + @BeforeEach + void setup() throws Exception { localFS = FileContext.getLocalFSFileContext(); localActiveDir = new File("target", this.getClass().getSimpleName() + "-activeDir") @@ -80,8 +81,8 @@ public void setup() throws Exception { client = createTimelineClient(conf); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { if (client != null) { client.stop(); } @@ -89,7 +90,7 @@ public void tearDown() throws Exception { } @Test - public void testPostEntities() throws Exception { + void testPostEntities() throws Exception { ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1); TimelineEntityGroupId groupId = @@ -117,7 +118,7 @@ public void testPostEntities() throws Exception { TimelineEntity[] entityTDB = new TimelineEntity[1]; entityTDB[0] = entities[0]; verify(spyTimelineWriter, times(1)).putEntities(entityTDB); - 
Assert.assertTrue(localFS.util().exists( + assertTrue(localFS.util().exists( new Path(getAppAttemptDir(attemptId1), "domainlog-" + attemptId1.toString()))); reset(spyTimelineWriter); } catch (Exception e) { - Assert.fail("Exception is not expected." + e); + fail("Exception is not expected." + e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java index c5b02fd32ca..62179b9dc28 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java @@ -19,9 +19,11 @@ package org.apache.hadoop.yarn.client.api.impl; import java.io.IOException; +import java.lang.reflect.Method; import java.net.URI; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import javax.ws.rs.core.MultivaluedMap; @@ -32,12 +34,8 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInfo; public class TestTimelineClientV2Impl { private static final Log LOG = @@ -46,20 +44,19 @@ private static final long TIME_TO_SLEEP = 150L; private static final String EXCEPTION_MSG = "Exception in the content"; - @Before - public void setup() { + @BeforeEach + public void setup(TestInfo testInfo) { conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f); conf.setInt(YarnConfiguration.NUMBER_OF_ASYNC_ENTITIES_TO_MERGE, 3); - if (!currTestName.getMethodName() + Optional<Method> testMethod =
testInfo.getTestMethod(); + if (!testMethod.isPresent() || !testMethod.get().getName() .contains("testRetryOnConnectionFailure")) { client = createTimelineClient(conf); } } - @Rule - public TestName currTestName = new TestName(); private YarnConfiguration conf; private TestV2TimelineClient createTimelineClient(YarnConfiguration config) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java index fc2c1d0d335..9828223e0e1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestHAUtil.java @@ -22,18 +22,18 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.util.Collection; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -public class TestHAUtil { +class TestHAUtil { private Configuration conf; private static final String RM1_ADDRESS_UNTRIMMED = " \t\t\n 1.2.3.4:8021 \n\t "; @@ -48,8 +48,8 @@ private static final String RM_NODE_IDS_UNTRIMMED = RM1_NODE_ID_UNTRIMMED + "," + RM2_NODE_ID; private static final String RM_NODE_IDS = RM1_NODE_ID + "," + RM2_NODE_ID; - @Before - public void setUp() { + @BeforeEach + void setUp() { conf = new Configuration(); conf.set(YarnConfiguration.RM_HA_IDS, RM_NODE_IDS_UNTRIMMED); conf.set(YarnConfiguration.RM_HA_ID, RM1_NODE_ID_UNTRIMMED); @@ -62,7 +62,7 @@ public void setUp() { } @Test - public void testGetRMServiceId() throws Exception { + void testGetRMServiceId() throws Exception { conf.set(YarnConfiguration.RM_HA_IDS, RM1_NODE_ID + "," + RM2_NODE_ID); Collection rmhaIds = HAUtil.getRMHAIds(conf); assertEquals(2, rmhaIds.size()); @@ -73,18 +73,18 @@ public void testGetRMServiceId() throws Exception { } @Test - public void testGetRMId() throws Exception { + void testGetRMId() throws Exception { conf.set(YarnConfiguration.RM_HA_ID, RM1_NODE_ID); assertEquals("Does not honor " + YarnConfiguration.RM_HA_ID, RM1_NODE_ID, HAUtil.getRMHAId(conf)); conf.clear(); - assertNull("Return null when " + YarnConfiguration.RM_HA_ID - + " is not set", HAUtil.getRMHAId(conf)); + assertNull(HAUtil.getRMHAId(conf), + "Return null when " + YarnConfiguration.RM_HA_ID + " is not set"); } @Test - public void testVerifyAndSetConfiguration() throws Exception { + void testVerifyAndSetConfiguration() throws Exception { Configuration myConf = new Configuration(conf); try { @@ -93,14 +93,14 @@ public void testVerifyAndSetConfiguration() throws Exception { fail("Should not throw any exceptions."); } - assertEquals("Should be saved as Trimmed collection", - StringUtils.getStringCollection(RM_NODE_IDS), - HAUtil.getRMHAIds(myConf)); - assertEquals("Should be saved as Trimmed string", - RM1_NODE_ID, 
HAUtil.getRMHAId(myConf)); + assertEquals(StringUtils.getStringCollection(RM_NODE_IDS), + HAUtil.getRMHAIds(myConf), + "Should be saved as Trimmed collection"); + assertEquals(RM1_NODE_ID, HAUtil.getRMHAId(myConf), + "Should be saved as Trimmed string"); for (String confKey : YarnConfiguration.getServiceAddressConfKeys(myConf)) { - assertEquals("RPC address not set for " + confKey, - RM1_ADDRESS, myConf.get(confKey)); + assertEquals(RM1_ADDRESS, myConf.get(confKey), + "RPC address not set for " + confKey); } myConf = new Configuration(conf); @@ -108,12 +108,11 @@ public void testVerifyAndSetConfiguration() throws Exception { try { HAUtil.verifyAndSetConfiguration(myConf); } catch (YarnRuntimeException e) { - assertEquals("YarnRuntimeException by verifyAndSetRMHAIds()", - HAUtil.BAD_CONFIG_MESSAGE_PREFIX + + assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getInvalidValueMessage(YarnConfiguration.RM_HA_IDS, myConf.get(YarnConfiguration.RM_HA_IDS) + "\nHA mode requires atleast two RMs"), - e.getMessage()); + e.getMessage(), "YarnRuntimeException by verifyAndSetRMHAIds()"); } myConf = new Configuration(conf); @@ -127,10 +126,9 @@ public void testVerifyAndSetConfiguration() throws Exception { try { HAUtil.verifyAndSetConfiguration(myConf); } catch (YarnRuntimeException e) { - assertEquals("YarnRuntimeException by getRMId()", - HAUtil.BAD_CONFIG_MESSAGE_PREFIX + + assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getNeedToSetValueMessage(YarnConfiguration.RM_HA_ID), - e.getMessage()); + e.getMessage(), "YarnRuntimeException by getRMId()"); } myConf = new Configuration(conf); @@ -144,11 +142,10 @@ public void testVerifyAndSetConfiguration() throws Exception { try { HAUtil.verifyAndSetConfiguration(myConf); } catch (YarnRuntimeException e) { - assertEquals("YarnRuntimeException by addSuffix()", - HAUtil.BAD_CONFIG_MESSAGE_PREFIX + + assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getInvalidValueMessage(YarnConfiguration.RM_HA_ID, - RM_INVALID_NODE_ID), - e.getMessage()); + RM_INVALID_NODE_ID), + e.getMessage(), "YarnRuntimeException by addSuffix()"); } myConf = new Configuration(); @@ -161,10 +158,10 @@ public void testVerifyAndSetConfiguration() throws Exception { } catch (YarnRuntimeException e) { String confKey = HAUtil.addSuffix(YarnConfiguration.RM_ADDRESS, RM1_NODE_ID); - assertEquals("YarnRuntimeException by Configuration#set()", - HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.getNeedToSetValueMessage( - HAUtil.addSuffix(YarnConfiguration.RM_HOSTNAME, RM1_NODE_ID) - + " or " + confKey), e.getMessage()); + assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + + HAUtil.getNeedToSetValueMessage(HAUtil.addSuffix( + YarnConfiguration.RM_HOSTNAME, RM1_NODE_ID) + " or " + confKey), + e.getMessage(), "YarnRuntimeException by Configuration#set()"); } // simulate the case YarnConfiguration.RM_HA_IDS doesn't contain @@ -180,10 +177,9 @@ public void testVerifyAndSetConfiguration() throws Exception { try { HAUtil.verifyAndSetConfiguration(myConf); } catch (YarnRuntimeException e) { - assertEquals("YarnRuntimeException by getRMId()'s validation", - HAUtil.BAD_CONFIG_MESSAGE_PREFIX + - HAUtil.getRMHAIdNeedToBeIncludedMessage("[rm2, rm3]", RM1_NODE_ID), - e.getMessage()); + assertEquals(HAUtil.BAD_CONFIG_MESSAGE_PREFIX + + HAUtil.getRMHAIdNeedToBeIncludedMessage("[rm2, rm3]", RM1_NODE_ID), + e.getMessage(), "YarnRuntimeException by getRMId()'s validation"); } // simulate the case that no leader election is enabled @@ -196,19 +192,20 @@ public void testVerifyAndSetConfiguration() throws Exception { 
try { HAUtil.verifyAndSetConfiguration(myConf); } catch (YarnRuntimeException e) { - assertEquals("YarnRuntimeException by getRMId()'s validation", + assertEquals( HAUtil.BAD_CONFIG_MESSAGE_PREFIX + HAUtil.NO_LEADER_ELECTION_MESSAGE, - e.getMessage()); + e.getMessage(), "YarnRuntimeException by getRMId()'s validation"); } } @Test - public void testGetConfKeyForRMInstance() { - assertTrue("RM instance id is not suffixed", + void testGetConfKeyForRMInstance() { + assertTrue( HAUtil.getConfKeyForRMInstance(YarnConfiguration.RM_ADDRESS, conf) - .contains(HAUtil.getRMHAId(conf))); - assertFalse("RM instance id is suffixed", + .contains(HAUtil.getRMHAId(conf)), + "RM instance id is not suffixed"); + assertFalse( HAUtil.getConfKeyForRMInstance(YarnConfiguration.NM_ADDRESS, conf) - .contains(HAUtil.getRMHAId(conf))); + .contains(HAUtil.getRMHAId(conf)), "RM instance id is suffixed"); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfiguration.java index a053fdb9376..2e44bbd9345 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfiguration.java @@ -18,28 +18,27 @@ package org.apache.hadoop.yarn.conf; -import org.junit.Assert; - import org.apache.hadoop.yarn.webapp.util.WebAppUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.net.InetSocketAddress; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class TestYarnConfiguration { +class TestYarnConfiguration { @Test - public void testDefaultRMWebUrl() throws Exception { + void testDefaultRMWebUrl() throws Exception { YarnConfiguration conf = new YarnConfiguration(); String rmWebUrl = WebAppUtils.getRMWebAppURLWithScheme(conf); // shouldn't have a "/" on the end of the url as all the other uri routinnes // specifically add slashes and Jetty doesn't handle double slashes. 
- Assert.assertNotSame("RM Web Url is not correct", "http://0.0.0.0:8088", - rmWebUrl); + assertNotSame("http://0.0.0.0:8088", rmWebUrl, + "RM Web Url is not correct"); // test it in HA scenario conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true); @@ -47,7 +46,7 @@ public void testDefaultRMWebUrl() throws Exception { conf.set("yarn.resourcemanager.webapp.address.rm1", "10.10.10.10:18088"); conf.set("yarn.resourcemanager.webapp.address.rm2", "20.20.20.20:28088"); String rmWebUrlinHA = WebAppUtils.getRMWebAppURLWithScheme(conf); - Assert.assertEquals("http://10.10.10.10:18088", rmWebUrlinHA); + assertEquals("http://10.10.10.10:18088", rmWebUrlinHA); YarnConfiguration conf2 = new YarnConfiguration(); conf2.setBoolean(YarnConfiguration.RM_HA_ENABLED, true); @@ -55,11 +54,11 @@ public void testDefaultRMWebUrl() throws Exception { conf2.set("yarn.resourcemanager.hostname.rm1", "30.30.30.30"); conf2.set("yarn.resourcemanager.hostname.rm2", "40.40.40.40"); String rmWebUrlinHA2 = WebAppUtils.getRMWebAppURLWithScheme(conf2); - Assert.assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2); + assertEquals("http://30.30.30.30:8088", rmWebUrlinHA2); } @Test - public void testRMWebUrlSpecified() throws Exception { + void testRMWebUrlSpecified() throws Exception { YarnConfiguration conf = new YarnConfiguration(); // seems a bit odd but right now we are forcing webapp for RM to be // RM_ADDRESS @@ -68,15 +67,14 @@ public void testRMWebUrlSpecified() throws Exception { conf.set(YarnConfiguration.RM_ADDRESS, "rmtesting:9999"); String rmWebUrl = WebAppUtils.getRMWebAppURLWithScheme(conf); String[] parts = rmWebUrl.split(":"); - Assert.assertEquals("RM Web URL Port is incrrect", 24543, - Integer.parseInt(parts[parts.length - 1])); - Assert.assertNotSame( - "RM Web Url not resolved correctly. Should not be rmtesting", - "http://rmtesting:24543", rmWebUrl); + assertEquals(24543, Integer.parseInt(parts[parts.length - 1]), + "RM Web URL Port is incrrect"); + assertNotSame("http://rmtesting:24543", rmWebUrl, + "RM Web Url not resolved correctly. 
Should not be rmtesting"); } @Test - public void testGetSocketAddressForNMWithHA() { + void testGetSocketAddressForNMWithHA() { YarnConfiguration conf = new YarnConfiguration(); // Set NM address @@ -94,7 +92,7 @@ public void testGetSocketAddressForNMWithHA() { } @Test - public void testGetSocketAddr() throws Exception { + void testGetSocketAddr() throws Exception { YarnConfiguration conf; InetSocketAddress resourceTrackerAddress; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java index 2b9d7455a9d..b8e161180b8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/event/TestAsyncDispatcher.java @@ -24,13 +24,16 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static java.time.Duration.ofSeconds; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTimeout; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.*; -public class TestAsyncDispatcher { +class TestAsyncDispatcher { /* This test checks whether dispatcher hangs on close if following two things * happen : @@ -38,32 +41,35 @@ * 2. Event queue is empty on close. */ @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(timeout=10000) - public void testDispatcherOnCloseIfQueueEmpty() throws Exception { - BlockingQueue eventQueue = spy(new LinkedBlockingQueue()); - Event event = mock(Event.class); + @Test + void testDispatcherOnCloseIfQueueEmpty() throws Exception { + final BlockingQueue eventQueue = + spy(new LinkedBlockingQueue()); + final Event event = mock(Event.class); doThrow(new InterruptedException()).when(eventQueue).put(event); - DrainDispatcher disp = new DrainDispatcher(eventQueue); + final DrainDispatcher disp = new DrainDispatcher(eventQueue); disp.init(new Configuration()); disp.setDrainEventsOnStop(); - disp.start(); - // Wait for event handler thread to start and begin waiting for events. - disp.waitForEventThreadToWait(); - try { - disp.getEventHandler().handle(event); - Assert.fail("Expected YarnRuntimeException"); - } catch (YarnRuntimeException e) { - Assert.assertTrue(e.getCause() instanceof InterruptedException); - } - // Queue should be empty and dispatcher should not hang on close - Assert.assertTrue("Event Queue should have been empty", - eventQueue.isEmpty()); - disp.close(); + + assertTimeout(ofSeconds(10), () -> { + disp.start(); + // Wait for event handler thread to start and begin waiting for events. 
+ disp.waitForEventThreadToWait(); + try { + disp.getEventHandler().handle(event); + fail("Expected YarnRuntimeException"); + } catch (YarnRuntimeException e) { + assertTrue(e.getCause() instanceof InterruptedException); + } + // Queue should be empty and dispatcher should not hang on close + assertTrue(eventQueue.isEmpty(), "Event Queue should have been empty"); + disp.close(); + }); } // Test dispatcher should timeout on draining events. - @Test(timeout=10000) - public void testDispatchStopOnTimeout() throws Exception { + @Test + void testDispatchStopOnTimeout() throws Exception { BlockingQueue eventQueue = new LinkedBlockingQueue(); eventQueue = spy(eventQueue); // simulate dispatcher is not drained. @@ -71,12 +77,15 @@ public void testDispatchStopOnTimeout() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.setInt(YarnConfiguration.DISPATCHER_DRAIN_EVENTS_TIMEOUT, 2000); - DrainDispatcher disp = new DrainDispatcher(eventQueue); + final DrainDispatcher disp = new DrainDispatcher(eventQueue); disp.init(conf); disp.setDrainEventsOnStop(); - disp.start(); - disp.waitForEventThreadToWait(); - disp.close(); + + assertTimeout(ofSeconds(10), () -> { + disp.start(); + disp.waitForEventThreadToWait(); + disp.close(); + }); } @SuppressWarnings("rawtypes") @@ -104,20 +113,23 @@ private void dispatchDummyEvents(Dispatcher disp, int count) { // Test if drain dispatcher drains events on stop. @SuppressWarnings({ "rawtypes" }) - @Test(timeout=10000) - public void testDrainDispatcherDrainEventsOnStop() throws Exception { + @Test + void testDrainDispatcherDrainEventsOnStop() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.setInt(YarnConfiguration.DISPATCHER_DRAIN_EVENTS_TIMEOUT, 2000); BlockingQueue queue = new LinkedBlockingQueue(); - DrainDispatcher disp = new DrainDispatcher(queue); + final DrainDispatcher disp = new DrainDispatcher(queue); disp.init(conf); disp.register(DummyType.class, new DummyHandler()); disp.setDrainEventsOnStop(); - disp.start(); - disp.waitForEventThreadToWait(); - dispatchDummyEvents(disp, 2); - disp.close(); - assertEquals(0, queue.size()); + + assertTimeout(ofSeconds(10), () -> { + disp.start(); + disp.waitForEventThreadToWait(); + dispatchDummyEvents(disp, 2); + disp.close(); + assertEquals(0, queue.size()); + }); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java index 671d1e2ea71..85578782b72 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java @@ -21,71 +21,71 @@ import java.io.FileNotFoundException; import java.io.IOException; -import org.junit.Assert; - import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.junit.Test; import com.google.protobuf.ServiceException; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; -public class TestRPCUtil { +class TestRPCUtil { @Test - public void testUnknownExceptionUnwrapping() { + void testUnknownExceptionUnwrapping() { Class exception = YarnException.class; String className = "UnknownException.class"; verifyRemoteExceptionUnwrapping(exception, className); } @Test - public void testRemoteIOExceptionUnwrapping() { + 
public void testRemoteIOExceptionUnwrapping() { +
void testRemoteIOExceptionUnwrapping() { Class exception = IOException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @Test - public void testRemoteIOExceptionDerivativeUnwrapping() { + void testRemoteIOExceptionDerivativeUnwrapping() { // Test IOException sub-class Class exception = FileNotFoundException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @Test - public void testRemoteYarnExceptionUnwrapping() { + void testRemoteYarnExceptionUnwrapping() { Class exception = YarnException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @Test - public void testRemoteYarnExceptionDerivativeUnwrapping() { + void testRemoteYarnExceptionDerivativeUnwrapping() { Class exception = YarnTestException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @Test - public void testRemoteRuntimeExceptionUnwrapping() { + void testRemoteRuntimeExceptionUnwrapping() { Class exception = NullPointerException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @Test - public void testUnexpectedRemoteExceptionUnwrapping() { + void testUnexpectedRemoteExceptionUnwrapping() { // Non IOException, YarnException thrown by the remote side. Class exception = Exception.class; verifyRemoteExceptionUnwrapping(RemoteException.class, exception.getName()); } @Test - public void testRemoteYarnExceptionWithoutStringConstructor() { + void testRemoteYarnExceptionWithoutStringConstructor() { // Derivatives of YarnException should always define a string constructor. Class exception = YarnTestExceptionNoConstructor.class; verifyRemoteExceptionUnwrapping(RemoteException.class, exception.getName()); } @Test - public void testRPCServiceExceptionUnwrapping() { + void testRPCServiceExceptionUnwrapping() { String message = "ServiceExceptionMessage"; ServiceException se = new ServiceException(message); @@ -96,12 +96,12 @@ public void testRPCServiceExceptionUnwrapping() { t = thrown; } - Assert.assertTrue(IOException.class.isInstance(t)); - Assert.assertTrue(t.getMessage().contains(message)); + assertTrue(IOException.class.isInstance(t)); + assertTrue(t.getMessage().contains(message)); } @Test - public void testRPCIOExceptionUnwrapping() { + void testRPCIOExceptionUnwrapping() { String message = "DirectIOExceptionMessage"; IOException ioException = new FileNotFoundException(message); ServiceException se = new ServiceException(ioException); @@ -112,12 +112,12 @@ public void testRPCIOExceptionUnwrapping() { } catch (Throwable thrown) { t = thrown; } - Assert.assertTrue(FileNotFoundException.class.isInstance(t)); - Assert.assertTrue(t.getMessage().contains(message)); + assertTrue(FileNotFoundException.class.isInstance(t)); + assertTrue(t.getMessage().contains(message)); } @Test - public void testRPCRuntimeExceptionUnwrapping() { + void testRPCRuntimeExceptionUnwrapping() { String message = "RPCRuntimeExceptionUnwrapping"; RuntimeException re = new NullPointerException(message); ServiceException se = new ServiceException(re); @@ -129,8 +129,8 @@ public void testRPCRuntimeExceptionUnwrapping() { t = thrown; } - Assert.assertTrue(NullPointerException.class.isInstance(t)); - Assert.assertTrue(t.getMessage().contains(message)); + assertTrue(NullPointerException.class.isInstance(t)); + assertTrue(t.getMessage().contains(message)); } private void verifyRemoteExceptionUnwrapping( @@ -147,18 +147,17 @@ private void verifyRemoteExceptionUnwrapping( t = thrown; } - Assert.assertTrue("Expected exception [" + expectedLocalException - + "] 
but found " + t, expectedLocalException.isInstance(t)); - Assert.assertTrue( - "Expected message [" + message + "] but found " + t.getMessage(), t - .getMessage().contains(message)); + assertTrue(expectedLocalException.isInstance(t), "Expected exception [" + + expectedLocalException + "] but found " + t); + assertTrue(t.getMessage().contains(message), + "Expected message [" + message + "] but found " + t.getMessage()); } private static class YarnTestException extends YarnException { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") - public YarnTestException(String message) { + YarnTestException(String message) { super(message); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogDeletionService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogDeletionService.java index 026996e010e..4f4533d6606 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogDeletionService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogDeletionService.java @@ -37,22 +37,23 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.Before; -import org.junit.Test; -import org.junit.Assert; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.mockito.Mockito.*; -public class TestAggregatedLogDeletionService { +class TestAggregatedLogDeletionService { - @Before - public void closeFilesystems() throws IOException { + @BeforeEach + void closeFilesystems() throws IOException { // prevent the same mockfs instance from being reused due to FS cache FileSystem.closeAll(); } @Test - public void testDeletion() throws Exception { + void testDeletion() throws Exception { long now = System.currentTimeMillis(); long toDeleteTime = now - (2000*1000); long toKeepTime = now - (1500*1000); @@ -189,7 +190,7 @@ protected void stopRMClient() { } @Test - public void testRefreshLogRetentionSettings() throws Exception { + void testRefreshLogRetentionSettings() throws Exception { long now = System.currentTimeMillis(); //time before 2000 sec long before2000Secs = now - (2000 * 1000); @@ -289,20 +290,20 @@ protected void stopRMClient() { conf.set(YarnConfiguration.LOG_AGGREGATION_RETAIN_CHECK_INTERVAL_SECONDS, "2"); //We have not called refreshLogSettings,hence don't expect to see the changed conf values - Assert.assertTrue(2000l != deletionSvc.getCheckIntervalMsecs()); + assertNotEquals(2000L, deletionSvc.getCheckIntervalMsecs()); //refresh the log settings deletionSvc.refreshLogRetentionSettings(); //Check interval time should reflect the new value - Assert.assertTrue(2000l == deletionSvc.getCheckIntervalMsecs()); + assertEquals(2000L, deletionSvc.getCheckIntervalMsecs()); //app2Dir should be deleted since it falls above the threshold verify(mockFs, timeout(10000)).delete(app2Dir, true); deletionSvc.stop(); } @Test - public void testCheckInterval() throws Exception { + void testCheckInterval() throws Exception { long RETENTION_SECS = 10 * 24 * 3600; long now = System.currentTimeMillis(); long 
toDeleteTime = now - RETENTION_SECS*1000; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java index efbaa4c44c1..5cbaf8d3adb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java @@ -18,6 +18,14 @@ package org.apache.hadoop.yarn.logaggregation; +import static java.time.Duration.ofMinutes; +import static java.time.Duration.ofSeconds; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTimeout; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.mockito.Mockito.doThrow; @@ -37,7 +45,6 @@ import java.util.Collections; import java.util.concurrent.CountDownLatch; -import org.junit.Assert; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -58,12 +65,11 @@ import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter; import org.apache.hadoop.yarn.util.Times; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -public class TestAggregatedLogFormat { +class TestAggregatedLogFormat { private static final File testWorkDir = new File("target", "TestAggregatedLogFormat"); @@ -81,9 +87,9 @@ } } - @Before - @After - public void cleanupTestDir() throws Exception { + @BeforeEach + @AfterEach + void cleanupTestDir() throws Exception { Path workDirPath = new Path(testWorkDir.getAbsolutePath()); LOG.info("Cleaning test directory [" + workDirPath + "]"); fs.delete(workDirPath, true); @@ -94,7 +100,7 @@ public void cleanupTestDir() throws Exception { //appending to logs @Test - public void testForCorruptedAggregatedLogs() throws Exception { + void testForCorruptedAggregatedLogs() throws Exception { Configuration conf = new Configuration(); File workDir = new File(testWorkDir, "testReadAcontainerLogs1"); Path remoteAppLogFile = @@ -119,7 +125,7 @@ public void testForCorruptedAggregatedLogs() throws Exception { LogReader.readAcontainerLogs(dis, writer); } catch (Exception e) { if(e.toString().contains("NumberFormatException")) { - Assert.fail("Aggregated logs are corrupted."); + fail("Aggregated logs are corrupted."); } } } @@ -181,7 +187,7 @@ public void run() { } @Test - public void testReadAcontainerLogs1() throws Exception { + void testReadAcontainerLogs1() throws Exception { //Verify the output generated by readAContainerLogs(DataInputStream, Writer, logUploadedTime) testReadAcontainerLog(true); @@ -236,8 +242,9 @@ private void testReadAcontainerLog(boolean logUploadedTime) throws Exception { // make sure permission are correct on the file FileStatus fsStatus = 
fs.getFileStatus(remoteAppLogFile); - Assert.assertEquals("permissions on log aggregation file are wrong", - FsPermission.createImmutable((short) 0640), fsStatus.getPermission()); + assertEquals(FsPermission.createImmutable((short) 0640), + fsStatus.getPermission(), + "permissions on log aggregation file are wrong"); LogReader logReader = new LogReader(conf, remoteAppLogFile); LogKey rLogKey = new LogKey(); @@ -261,122 +268,124 @@ private void testReadAcontainerLog(boolean logUploadedTime) throws Exception { + ("\nLogLength:" + numChars).length() + "\nLog Contents:\n".length() + numChars + "\n".length() + "\nEnd of LogType:stdout\n".length(); - Assert.assertTrue("LogType not matched", s.contains("LogType:stdout")); - Assert.assertTrue("log file:stderr should not be aggregated.", !s.contains("LogType:stderr")); - Assert.assertTrue("log file:logs should not be aggregated.", !s.contains("LogType:logs")); - Assert.assertTrue("LogLength not matched", s.contains("LogLength:" + numChars)); - Assert.assertTrue("Log Contents not matched", s.contains("Log Contents")); + assertTrue(s.contains("LogType:stdout"), "LogType not matched"); + assertTrue(!s.contains("LogType:stderr"), "log file:stderr should not be aggregated."); + assertTrue(!s.contains("LogType:logs"), "log file:logs should not be aggregated."); + assertTrue(s.contains("LogLength:" + numChars), "LogLength not matched"); + assertTrue(s.contains("Log Contents"), "Log Contents not matched"); StringBuilder sb = new StringBuilder(); for (int i = 0 ; i < numChars ; i++) { sb.append(filler); } String expectedContent = sb.toString(); - Assert.assertTrue("Log content incorrect", s.contains(expectedContent)); + assertTrue(s.contains(expectedContent), "Log content incorrect"); - Assert.assertEquals(expectedLength, s.length()); + assertEquals(expectedLength, s.length()); } - @Test(timeout=10000) - public void testContainerLogsFileAccess() throws IOException { + @Test + void testContainerLogsFileAccess() throws IOException { // This test will run only if NativeIO is enabled as SecureIOUtils // require it to be enabled. - Assume.assumeTrue(NativeIO.isAvailable()); - Configuration conf = new Configuration(); - conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, - "kerberos"); - UserGroupInformation.setConfiguration(conf); - File workDir = new File(testWorkDir, "testContainerLogsFileAccess1"); - Path remoteAppLogFile = - new Path(workDir.getAbsolutePath(), "aggregatedLogFile"); - Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles"); - - String data = "Log File content for container : "; - // Creating files for container1. Log aggregator will try to read log files - // with illegal user. 
- ApplicationId applicationId = ApplicationId.newInstance(1, 1); - ApplicationAttemptId applicationAttemptId = - ApplicationAttemptId.newInstance(applicationId, 1); - ContainerId testContainerId1 = - ContainerId.newContainerId(applicationAttemptId, 1); - Path appDir = - new Path(srcFileRoot, testContainerId1.getApplicationAttemptId() - .getApplicationId().toString()); - Path srcFilePath1 = new Path(appDir, testContainerId1.toString()); - String stdout = "stdout"; - String stderr = "stderr"; - writeSrcFile(srcFilePath1, stdout, data + testContainerId1.toString() - + stdout); - writeSrcFile(srcFilePath1, stderr, data + testContainerId1.toString() - + stderr); - - UserGroupInformation ugi = - UserGroupInformation.getCurrentUser(); - try (LogWriter logWriter = new LogWriter()) { - logWriter.initialize(conf, remoteAppLogFile, ugi); - - LogKey logKey = new LogKey(testContainerId1); - String randomUser = "randomUser"; - LogValue logValue = - spy(new LogValue(Collections.singletonList(srcFileRoot.toString()), - testContainerId1, randomUser)); - - // It is trying simulate a situation where first log file is owned by - // different user (probably symlink) and second one by the user itself. - // The first file should not be aggregated. Because this log file has - // the invalid user name. - when(logValue.getUser()).thenReturn(randomUser).thenReturn( - ugi.getShortUserName()); - logWriter.append(logKey, logValue); - } + assumeTrue(NativeIO.isAvailable()); + assertTimeout(ofSeconds(10), () -> { + Configuration conf = new Configuration(); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + UserGroupInformation.setConfiguration(conf); + File workDir = new File(testWorkDir, "testContainerLogsFileAccess1"); + Path remoteAppLogFile = + new Path(workDir.getAbsolutePath(), "aggregatedLogFile"); + Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles"); + + String data = "Log File content for container : "; + // Creating files for container1. Log aggregator will try to read log files + // with illegal user. + ApplicationId applicationId = ApplicationId.newInstance(1, 1); + ApplicationAttemptId applicationAttemptId = + ApplicationAttemptId.newInstance(applicationId, 1); + ContainerId testContainerId1 = + ContainerId.newContainerId(applicationAttemptId, 1); + Path appDir = + new Path(srcFileRoot, testContainerId1.getApplicationAttemptId() + .getApplicationId().toString()); + Path srcFilePath1 = new Path(appDir, testContainerId1.toString()); + String stdout = "stdout"; + String stderr = "stderr"; + writeSrcFile(srcFilePath1, stdout, data + testContainerId1.toString() + + stdout); + writeSrcFile(srcFilePath1, stderr, data + testContainerId1.toString() + + stderr); + + UserGroupInformation ugi = + UserGroupInformation.getCurrentUser(); + try (LogWriter logWriter = new LogWriter()) { + logWriter.initialize(conf, remoteAppLogFile, ugi); + + LogKey logKey = new LogKey(testContainerId1); + String randomUser = "randomUser"; + LogValue logValue = + spy(new LogValue(Collections.singletonList(srcFileRoot.toString()), + testContainerId1, randomUser)); + + // It is trying simulate a situation where first log file is owned by + // different user (probably symlink) and second one by the user itself. + // The first file should not be aggregated. Because this log file has + // the invalid user name. 
+ when(logValue.getUser()).thenReturn(randomUser).thenReturn( + ugi.getShortUserName()); + logWriter.append(logKey, logValue); + } - BufferedReader in = - new BufferedReader(new FileReader(new File(remoteAppLogFile - .toUri().getRawPath()))); - String line; - StringBuffer sb = new StringBuffer(""); - while ((line = in.readLine()) != null) { - LOG.info(line); - sb.append(line); - } - line = sb.toString(); + BufferedReader in = + new BufferedReader(new FileReader(new File(remoteAppLogFile + .toUri().getRawPath()))); + String line; + StringBuffer sb = new StringBuffer(""); + while ((line = in.readLine()) != null) { + LOG.info(line); + sb.append(line); + } + line = sb.toString(); - String expectedOwner = ugi.getShortUserName(); - if (Path.WINDOWS) { - final String adminsGroupString = "Administrators"; - if (Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString)) { - expectedOwner = adminsGroupString; + String expectedOwner = ugi.getShortUserName(); + if (Path.WINDOWS) { + final String adminsGroupString = "Administrators"; + if (Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString)) { + expectedOwner = adminsGroupString; + } } - } - // This file: stderr should not be aggregated. - // And we will not aggregate the log message. - String stdoutFile1 = - StringUtils.join( - File.separator, - Arrays.asList(new String[] { - workDir.getAbsolutePath(), "srcFiles", - testContainerId1.getApplicationAttemptId().getApplicationId() - .toString(), testContainerId1.toString(), stderr })); - - // The file: stdout is expected to be aggregated. - String stdoutFile2 = - StringUtils.join( - File.separator, - Arrays.asList(new String[] { - workDir.getAbsolutePath(), "srcFiles", - testContainerId1.getApplicationAttemptId().getApplicationId() - .toString(), testContainerId1.toString(), stdout })); - String message2 = - "Owner '" + expectedOwner + "' for path " - + stdoutFile2 + " did not match expected owner '" - + ugi.getShortUserName() + "'"; - - Assert.assertFalse(line.contains(message2)); - Assert.assertFalse(line.contains(data + testContainerId1.toString() - + stderr)); - Assert.assertTrue(line.contains(data + testContainerId1.toString() - + stdout)); + // This file: stderr should not be aggregated. + // And we will not aggregate the log message. + String stdoutFile1 = + StringUtils.join( + File.separator, + Arrays.asList(new String[]{ + workDir.getAbsolutePath(), "srcFiles", + testContainerId1.getApplicationAttemptId().getApplicationId() + .toString(), testContainerId1.toString(), stderr})); + + // The file: stdout is expected to be aggregated. 
+ String stdoutFile2 = + StringUtils.join( + File.separator, + Arrays.asList(new String[]{ + workDir.getAbsolutePath(), "srcFiles", + testContainerId1.getApplicationAttemptId().getApplicationId() + .toString(), testContainerId1.toString(), stdout})); + String message2 = + "Owner '" + expectedOwner + "' for path " + + stdoutFile2 + " did not match expected owner '" + + ugi.getShortUserName() + "'"; + + assertFalse(line.contains(message2)); + assertFalse(line.contains(data + testContainerId1.toString() + + stderr)); + assertTrue(line.contains(data + testContainerId1.toString() + + stdout)); + }); } private void writeSrcFile(Path srcFilePath, String fileName, long length) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java index 1e71b3cdca8..25e7294517d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java @@ -49,22 +49,23 @@ import org.apache.hadoop.yarn.webapp.view.BlockForTest; import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.*; -import static org.junit.Assert.*; /** * Test AggregatedLogsBlock. AggregatedLogsBlock should check user, aggregate a * logs into one file and show this logs or errors into html code * */ -public class TestAggregatedLogsBlock { +class TestAggregatedLogsBlock { /** * Bad user. 
User 'owner' is trying to read logs without access */ @Test - public void testAccessDenied() throws Exception { + void testAccessDenied() throws Exception { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); @@ -89,7 +90,7 @@ public void testAccessDenied() throws Exception { } @Test - public void testBlockContainsPortNumForUnavailableAppLog() { + void testBlockContainsPortNumForUnavailableAppLog() { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); @@ -114,7 +115,7 @@ public void testBlockContainsPortNumForUnavailableAppLog() { * @throws Exception */ @Test - public void testBadLogs() throws Exception { + void testBadLogs() throws Exception { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); @@ -145,7 +146,7 @@ public void testBadLogs() throws Exception { * @throws Exception */ @Test - public void testAggregatedLogsBlock() throws Exception { + void testAggregatedLogsBlock() throws Exception { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); @@ -176,7 +177,7 @@ public void testAggregatedLogsBlock() throws Exception { * @throws Exception */ @Test - public void testAggregatedLogsBlockHar() throws Exception { + void testAggregatedLogsBlockHar() throws Exception { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); @@ -222,7 +223,7 @@ public void testAggregatedLogsBlockHar() throws Exception { * @throws Exception */ @Test - public void testNoLogs() throws Exception { + void testNoLogs() throws Exception { FileUtil.fullyDelete(new File("target/logs")); Configuration configuration = getConfiguration(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestContainerLogsUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestContainerLogsUtils.java index 8b665e03f6e..ed5742c333f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestContainerLogsUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestContainerLogsUtils.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.yarn.logaggregation; -import static org.junit.Assert.assertTrue; - import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -34,6 +32,8 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * This class contains several utility functions for log aggregation tests. 
*/ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/NodeLabelTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/NodeLabelTestBase.java index 28b9497fa36..a9c187cb4aa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/NodeLabelTestBase.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/NodeLabelTestBase.java @@ -27,17 +27,21 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeLabel; -import org.junit.Assert; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class NodeLabelTestBase { public static void assertMapEquals(Map> expected, ImmutableMap> actual) { - Assert.assertEquals(expected.size(), actual.size()); + assertEquals(expected.size(), actual.size()); for (NodeId k : expected.keySet()) { - Assert.assertTrue(actual.containsKey(k)); + assertTrue(actual.containsKey(k)); assertCollectionEquals(expected.get(k), actual.get(k)); } } @@ -45,9 +49,9 @@ public static void assertMapEquals(Map> expected, public static void assertLabelInfoMapEquals( Map> expected, ImmutableMap> actual) { - Assert.assertEquals(expected.size(), actual.size()); + assertEquals(expected.size(), actual.size()); for (NodeId k : expected.keySet()) { - Assert.assertTrue(actual.containsKey(k)); + assertTrue(actual.containsKey(k)); assertNLCollectionEquals(expected.get(k), actual.get(k)); } } @@ -55,13 +59,13 @@ public static void assertLabelInfoMapEquals( public static void assertLabelsToNodesEquals( Map> expected, ImmutableMap> actual) { - Assert.assertEquals(expected.size(), actual.size()); + assertEquals(expected.size(), actual.size()); for (String k : expected.keySet()) { - Assert.assertTrue(actual.containsKey(k)); + assertTrue(actual.containsKey(k)); Set expectedS1 = new HashSet<>(expected.get(k)); Set actualS2 = new HashSet<>(actual.get(k)); - Assert.assertEquals(expectedS1, actualS2); - Assert.assertTrue(expectedS1.containsAll(actualS2)); + assertEquals(expectedS1, actualS2); + assertTrue(expectedS1.containsAll(actualS2)); } } @@ -86,7 +90,7 @@ public static void assertLabelsToNodesEquals( public static void assertMapContains(Map> expected, ImmutableMap> actual) { for (NodeId k : actual.keySet()) { - Assert.assertTrue(expected.containsKey(k)); + assertTrue(expected.containsKey(k)); assertCollectionEquals(expected.get(k), actual.get(k)); } } @@ -94,28 +98,28 @@ public static void assertMapContains(Map> expected, public static void assertCollectionEquals(Collection expected, Collection actual) { if (expected == null) { - Assert.assertNull(actual); + assertNull(actual); } else { - Assert.assertNotNull(actual); + assertNotNull(actual); } Set expectedSet = new HashSet<>(expected); Set actualSet = new HashSet<>(actual); - Assert.assertEquals(expectedSet, actualSet); - Assert.assertTrue(expectedSet.containsAll(actualSet)); + assertEquals(expectedSet, actualSet); + assertTrue(expectedSet.containsAll(actualSet)); } public static void assertNLCollectionEquals(Collection expected, Collection actual) { if (expected == null) { - Assert.assertNull(actual); + 
assertNull(actual); } else { - Assert.assertNotNull(actual); + assertNotNull(actual); } Set expectedSet = new HashSet<>(expected); Set actualSet = new HashSet<>(actual); - Assert.assertEquals(expectedSet, actualSet); - Assert.assertTrue(expectedSet.containsAll(actualSet)); + assertEquals(expectedSet, actualSet); + assertTrue(expectedSet.containsAll(actualSet)); } @SuppressWarnings("unchecked") @@ -150,13 +154,13 @@ public NodeId toNodeId(String str) { public static void assertLabelsInfoToNodesEquals( Map> expected, ImmutableMap> actual) { - Assert.assertEquals(expected.size(), actual.size()); + assertEquals(expected.size(), actual.size()); for (NodeLabel k : expected.keySet()) { - Assert.assertTrue(actual.containsKey(k)); + assertTrue(actual.containsKey(k)); Set expectedS1 = new HashSet<>(expected.get(k)); Set actualS2 = new HashSet<>(actual.get(k)); - Assert.assertEquals(expectedS1, actualS2); - Assert.assertTrue(expectedS1.containsAll(actualS2)); + assertEquals(expectedS1, actualS2); + assertTrue(expectedS1.containsAll(actualS2)); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java index 54e331b71b9..c716aaaf04e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java @@ -18,8 +18,6 @@ package org.apache.hadoop.yarn.nodelabels; -import static org.junit.Assert.assertTrue; - import java.io.IOException; import java.util.Arrays; import java.util.Collection; @@ -32,20 +30,28 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; - -public class TestCommonNodeLabelsManager extends NodeLabelTestBase { +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static java.time.Duration.ofSeconds; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTimeout; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +class TestCommonNodeLabelsManager extends NodeLabelTestBase { DummyCommonNodeLabelsManager mgr = null; - @Before - public void before() { + @BeforeEach + void before() { mgr = new DummyCommonNodeLabelsManager(); Configuration conf = new YarnConfiguration(); conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true); @@ -53,190 +59,198 @@ public void before() { mgr.start(); } - @After - public void after() { + @AfterEach + void after() { mgr.stop(); } - @Test(timeout = 5000) - public void testAddRemovelabel() throws Exception { - // Add some label - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("hello")); - 
verifyNodeLabelAdded(Sets.newHashSet("hello"), mgr.lastAddedlabels); - - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("world")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("hello1", "world1")); - verifyNodeLabelAdded(Sets.newHashSet("hello1", "world1"), mgr.lastAddedlabels); - - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Sets.newHashSet("hello", "world", "hello1", "world1"))); - try { - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("hello1", - false))); - Assert.fail("IOException not thrown on exclusivity change of labels"); - } catch (Exception e) { - Assert.assertTrue("IOException is expected when exclusivity is modified", - e instanceof IOException); - } - try { - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("hello1", - true))); - } catch (Exception e) { - Assert.assertFalse( - "IOException not expected when no change in exclusivity", - e instanceof IOException); - } - // try to remove null, empty and non-existed label, should fail - for (String p : Arrays.asList(null, CommonNodeLabelsManager.NO_LABEL, "xx")) { - boolean caught = false; + @Test + void testAddRemovelabel() throws Exception { + assertTimeout(ofSeconds(5), () -> { + // Add some label + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("hello")); + verifyNodeLabelAdded(Sets.newHashSet("hello"), mgr.lastAddedlabels); + + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("world")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("hello1", "world1")); + verifyNodeLabelAdded(Sets.newHashSet("hello1", "world1"), mgr.lastAddedlabels); + + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Sets.newHashSet("hello", "world", "hello1", "world1"))); try { - mgr.removeFromClusterNodeLabels(Arrays.asList(p)); - } catch (IOException e) { - caught = true; + mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("hello1", + false))); + fail("IOException not thrown on exclusivity change of labels"); + } catch (Exception e) { + assertTrue(e instanceof IOException, + "IOException is expected when exclusivity is modified"); + } + try { + mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("hello1", + true))); + } catch (Exception e) { + assertFalse(e instanceof IOException, + "IOException not expected when no change in exclusivity"); + } + // try to remove null, empty and non-existed label, should fail + for (String p : Arrays.asList(null, CommonNodeLabelsManager.NO_LABEL, "xx")) { + boolean caught = false; + try { + mgr.removeFromClusterNodeLabels(Arrays.asList(p)); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, + "remove label should fail when label is null/empty/non-existed"); } - Assert.assertTrue("remove label should fail " - + "when label is null/empty/non-existed", caught); - } - // Remove some label - mgr.removeFromClusterNodeLabels(Arrays.asList("hello")); - assertCollectionEquals(Sets.newHashSet("hello"), mgr.lastRemovedlabels); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("world", "hello1", "world1"))); - - mgr.removeFromClusterNodeLabels(Arrays - .asList("hello1", "world1", "world")); - Assert.assertTrue(mgr.lastRemovedlabels.containsAll(Sets.newHashSet( - "hello1", "world1", "world"))); - Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); + // Remove some label + mgr.removeFromClusterNodeLabels(Arrays.asList("hello")); + assertCollectionEquals(Sets.newHashSet("hello"), mgr.lastRemovedlabels); + 
assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("world", "hello1", "world1"))); + + mgr.removeFromClusterNodeLabels(Arrays + .asList("hello1", "world1", "world")); + assertTrue(mgr.lastRemovedlabels.containsAll(Sets.newHashSet( + "hello1", "world1", "world"))); + assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); + }); } - @Test(timeout = 5000) - public void testAddlabelWithCase() throws Exception { - // Add some label, case will not ignore here - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("HeLlO")); - verifyNodeLabelAdded(Sets.newHashSet("HeLlO"), mgr.lastAddedlabels); - Assert.assertFalse(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("hello"))); + @Test + void testAddlabelWithCase() throws Exception { + assertTimeout(ofSeconds(5), () -> { + // Add some label, case will not ignore here + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("HeLlO")); + verifyNodeLabelAdded(Sets.newHashSet("HeLlO"), mgr.lastAddedlabels); + assertFalse(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("hello"))); + }); } - @Test(timeout = 5000) - public void testAddlabelWithExclusivity() throws Exception { - // Add some label, case will not ignore here - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("a", false), NodeLabel.newInstance("b", true))); - Assert.assertFalse(mgr.isExclusiveNodeLabel("a")); - Assert.assertTrue(mgr.isExclusiveNodeLabel("b")); + @Test + void testAddlabelWithExclusivity() throws Exception { + assertTimeout(ofSeconds(5), () -> { + // Add some label, case will not ignore here + mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("a", false), NodeLabel.newInstance("b", true))); + assertFalse(mgr.isExclusiveNodeLabel("a")); + assertTrue(mgr.isExclusiveNodeLabel("b")); + }); } - @Test(timeout = 5000) - public void testAddInvalidlabel() throws IOException { - boolean caught = false; - try { - Set set = new HashSet(); - set.add(null); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(set); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("null label should not add to repo", caught); + @Test + void testAddInvalidlabel() throws IOException { + assertTimeout(ofSeconds(5), () -> { + boolean caught = false; + try { + Set set = new HashSet(); + set.add(null); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(set); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "null label should not add to repo"); - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(CommonNodeLabelsManager.NO_LABEL)); - } catch (IOException e) { - caught = true; - } + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(CommonNodeLabelsManager.NO_LABEL)); + } catch (IOException e) { + caught = true; + } - Assert.assertTrue("empty label should not add to repo", caught); + assertTrue(caught, "empty label should not add to repo"); - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-?")); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("invalid label character should not add to repo", caught); + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-?")); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "invalid label character should not add to repo"); - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(StringUtils.repeat("c", 257))); - } catch 
(IOException e) { - caught = true; - } - Assert.assertTrue("too long label should not add to repo", caught); + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(StringUtils.repeat("c", 257))); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "too long label should not add to repo"); - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-aaabbb")); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("label cannot start with \"-\"", caught); + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-aaabbb")); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "label cannot start with \"-\""); - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("_aaabbb")); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("label cannot start with \"_\"", caught); - - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("a^aabbb")); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("label cannot contains other chars like ^[] ...", caught); - - caught = false; - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("aa[a]bbb")); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("label cannot contains other chars like ^[] ...", caught); + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("_aaabbb")); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "label cannot start with \"_\""); + + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("a^aabbb")); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "label cannot contains other chars like ^[] ..."); + + caught = false; + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("aa[a]bbb")); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "label cannot contains other chars like ^[] ..."); + }); } @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(timeout = 5000) + @Test public void testAddReplaceRemoveLabelsOnNodes() throws Exception { - // set a label on a node, but label doesn't exist - boolean caught = false; - try { - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("node"), toSet("label"))); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("trying to set a label to a node but " - + "label doesn't exist in repository should fail", caught); - - // set a label on a node, but node is null or empty - try { - mgr.replaceLabelsOnNode(ImmutableMap.of( - toNodeId(CommonNodeLabelsManager.NO_LABEL), toSet("label"))); - } catch (IOException e) { - caught = true; - } - Assert.assertTrue("trying to add a empty node but succeeded", caught); - - // set node->label one by one - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); - assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), - toSet("p2"), toNodeId("n2"), toSet("p3"))); - assertMapEquals(mgr.lastNodeToLabels, - ImmutableMap.of(toNodeId("n2"), toSet("p3"))); - - // set bunch of node->label - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), - 
toNodeId("n1"), toSet("p1"))); - assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), - toSet("p1"), toNodeId("n2"), toSet("p3"), toNodeId("n3"), toSet("p3"))); - assertMapEquals(mgr.lastNodeToLabels, ImmutableMap.of(toNodeId("n3"), - toSet("p3"), toNodeId("n1"), toSet("p1"))); + assertTimeout(ofSeconds(5), () -> { + // set a label on a node, but label doesn't exist + boolean caught = false; + try { + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("node"), toSet("label"))); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "trying to set a label to a node but " + + "label doesn't exist in repository should fail"); + + // set a label on a node, but node is null or empty + try { + mgr.replaceLabelsOnNode(ImmutableMap.of( + toNodeId(CommonNodeLabelsManager.NO_LABEL), toSet("label"))); + } catch (IOException e) { + caught = true; + } + assertTrue(caught, "trying to add a empty node but succeeded"); + + // set node->label one by one + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); + assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), + toSet("p2"), toNodeId("n2"), toSet("p3"))); + assertMapEquals(mgr.lastNodeToLabels, + ImmutableMap.of(toNodeId("n2"), toSet("p3"))); + + // set bunch of node->label + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), + toNodeId("n1"), toSet("p1"))); + assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), + toSet("p1"), toNodeId("n2"), toSet("p3"), toNodeId("n3"), toSet("p3"))); + assertMapEquals(mgr.lastNodeToLabels, ImmutableMap.of(toNodeId("n3"), + toSet("p3"), toNodeId("n1"), toSet("p1"))); /* * n1: p1 @@ -244,376 +258,395 @@ public void testAddReplaceRemoveLabelsOnNodes() throws Exception { * n3: p3 */ - // remove label on node - mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), - toSet("p3"), toNodeId("n3"), toSet("p3"))); - assertMapEquals(mgr.lastNodeToLabels, - ImmutableMap.of(toNodeId("n1"), CommonNodeLabelsManager.EMPTY_STRING_SET)); - - // add label on node - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - assertMapEquals( - mgr.getNodeLabels(), - ImmutableMap.of(toNodeId("n1"), toSet("p1"), toNodeId("n2"), - toSet("p3"), toNodeId("n3"), toSet("p3"))); - assertMapEquals(mgr.lastNodeToLabels, - ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - - // remove labels on node - mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), - toNodeId("n2"), toSet("p3"), toNodeId("n3"), toSet("p3"))); - Assert.assertEquals(0, mgr.getNodeLabels().size()); - assertMapEquals(mgr.lastNodeToLabels, ImmutableMap.of(toNodeId("n1"), - CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n2"), - CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n3"), - CommonNodeLabelsManager.EMPTY_STRING_SET)); + // remove label on node + mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), + toSet("p3"), toNodeId("n3"), toSet("p3"))); + assertMapEquals(mgr.lastNodeToLabels, + ImmutableMap.of(toNodeId("n1"), CommonNodeLabelsManager.EMPTY_STRING_SET)); + + // add label on node + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); 
+ assertMapEquals( + mgr.getNodeLabels(), + ImmutableMap.of(toNodeId("n1"), toSet("p1"), toNodeId("n2"), + toSet("p3"), toNodeId("n3"), toSet("p3"))); + assertMapEquals(mgr.lastNodeToLabels, + ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + + // remove labels on node + mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), + toNodeId("n2"), toSet("p3"), toNodeId("n3"), toSet("p3"))); + assertEquals(0, mgr.getNodeLabels().size()); + assertMapEquals(mgr.lastNodeToLabels, ImmutableMap.of(toNodeId("n1"), + CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n2"), + CommonNodeLabelsManager.EMPTY_STRING_SET, toNodeId("n3"), + CommonNodeLabelsManager.EMPTY_STRING_SET)); + }); } - @Test(timeout = 5000) - public void testRemovelabelWithNodes() throws Exception { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p2"))); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n3"), toSet("p3"))); - - mgr.removeFromClusterNodeLabels(ImmutableSet.of("p1")); - assertMapEquals(mgr.getNodeLabels(), - ImmutableMap.of(toNodeId("n2"), toSet("p2"), toNodeId("n3"), toSet("p3"))); - assertCollectionEquals(Arrays.asList("p1"), mgr.lastRemovedlabels); - - mgr.removeFromClusterNodeLabels(ImmutableSet.of("p2", "p3")); - Assert.assertTrue(mgr.getNodeLabels().isEmpty()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); - assertCollectionEquals(Arrays.asList("p2", "p3"), mgr.lastRemovedlabels); + @Test + void testRemovelabelWithNodes() throws Exception { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p2"))); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n3"), toSet("p3"))); + + mgr.removeFromClusterNodeLabels(ImmutableSet.of("p1")); + assertMapEquals(mgr.getNodeLabels(), + ImmutableMap.of(toNodeId("n2"), toSet("p2"), toNodeId("n3"), toSet("p3"))); + assertCollectionEquals(Arrays.asList("p1"), mgr.lastRemovedlabels); + + mgr.removeFromClusterNodeLabels(ImmutableSet.of("p2", "p3")); + assertTrue(mgr.getNodeLabels().isEmpty()); + assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); + assertCollectionEquals(Arrays.asList("p2", "p3"), mgr.lastRemovedlabels); + }); } - @Test(timeout = 5000) - public void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1")); - assertCollectionEquals(toSet("p1"), mgr.getClusterNodeLabelNames()); - mgr.removeFromClusterNodeLabels(toSet("p1 ")); - Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); + @Test + void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1")); + assertCollectionEquals(toSet("p1"), mgr.getClusterNodeLabelNames()); + mgr.removeFromClusterNodeLabels(toSet("p1 ")); + assertTrue(mgr.getClusterNodeLabelNames().isEmpty()); + }); } - @Test(timeout = 5000) - public void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1", "p2")); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1 "))); - assertMapEquals( - mgr.getNodeLabels(), - ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - 
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet(" p2"))); - assertMapEquals( - mgr.getNodeLabels(), - ImmutableMap.of(toNodeId("n1"), toSet("p2"))); - mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet(" p2 "))); - Assert.assertTrue(mgr.getNodeLabels().isEmpty()); + @Test + void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1", "p2")); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1 "))); + assertMapEquals( + mgr.getNodeLabels(), + ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet(" p2"))); + assertMapEquals( + mgr.getNodeLabels(), + ImmutableMap.of(toNodeId("n1"), toSet("p2"))); + mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet(" p2 "))); + assertTrue(mgr.getNodeLabels().isEmpty()); + }); } - @Test(timeout = 5000) - public void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo() + @Test + void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo() throws IOException { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - assertMapEquals( - mgr.getNodeLabels(), - ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - - // Replace labels on n1:1 to P2 - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"), - toNodeId("n1:2"), toSet("p2"))); - assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), - toSet("p1"), toNodeId("n1:1"), toSet("p2"), toNodeId("n1:2"), - toSet("p2"))); - - // Replace labels on n1 to P1, both n1:1/n1 will be P1 now - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), - toSet("p1"), toNodeId("n1:1"), toSet("p1"), toNodeId("n1:2"), - toSet("p1"))); - - // Set labels on n1:1 to P2 again to verify if add/remove works - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"))); + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + assertMapEquals( + mgr.getNodeLabels(), + ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + + // Replace labels on n1:1 to P2 + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"), + toNodeId("n1:2"), toSet("p2"))); + assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), + toSet("p1"), toNodeId("n1:1"), toSet("p2"), toNodeId("n1:2"), + toSet("p2"))); + + // Replace labels on n1 to P1, both n1:1/n1 will be P1 now + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + assertMapEquals(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n1"), + toSet("p1"), toNodeId("n1:1"), toSet("p1"), toNodeId("n1:2"), + toSet("p1"))); + + // Set labels on n1:1 to P2 again to verify if add/remove works + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"))); + }); } private void assertNodeLabelsDisabledErrorMessage(IOException e) { - Assert.assertEquals(CommonNodeLabelsManager.NODE_LABELS_NOT_ENABLED_ERR, + assertEquals(CommonNodeLabelsManager.NODE_LABELS_NOT_ENABLED_ERR, e.getMessage()); } - @Test(timeout = 5000) - public void testNodeLabelsDisabled() throws IOException { - DummyCommonNodeLabelsManager mgr = new DummyCommonNodeLabelsManager(); - Configuration conf = new YarnConfiguration(); - 
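[Editor's note] The exception checks in testNodeLabelsDisabled and similar tests are migrated as-is: a boolean "caught" flag around a try/catch, followed by assertTrue(caught, ...). JUnit 5 also offers Assertions.assertThrows, which states the same intent more compactly and returns the thrown exception for further inspection. A minimal sketch under the assumption of a stand-in DisabledManager class (not the real CommonNodeLabelsManager) whose method throws IOException when node labels are disabled:

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertThrows;

    import java.io.IOException;
    import org.junit.jupiter.api.Test;

    class AssertThrowsSketch {

      // Hypothetical collaborator standing in for the node labels manager.
      static class DisabledManager {
        void addLabel(String label) throws IOException {
          throw new IOException("Node-label-based scheduling is disabled");
        }
      }

      @Test
      void addLabelFailsWhenDisabled() {
        DisabledManager mgr = new DisabledManager();
        // Replaces the try/catch + "caught" flag, and exposes the exception for checks.
        IOException e = assertThrows(IOException.class, () -> mgr.addLabel("x"));
        assertEquals("Node-label-based scheduling is disabled", e.getMessage());
      }
    }

Keeping the original flag-based structure, as this patch does, avoids changing test behaviour during the migration; switching to assertThrows would be an optional follow-up cleanup.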
conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, false); - mgr.init(conf); - mgr.start(); - boolean caught = false; - - // add labels - try { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x")); - } catch (IOException e) { - assertNodeLabelsDisabledErrorMessage(e); - caught = true; - } - // check exception caught - Assert.assertTrue(caught); - caught = false; - - // remove labels - try { - mgr.removeFromClusterNodeLabels(ImmutableSet.of("x")); - } catch (IOException e) { - assertNodeLabelsDisabledErrorMessage(e); - caught = true; - } - // check exception caught - Assert.assertTrue(caught); - caught = false; - - // add labels to node - try { - mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("host", 0), - CommonNodeLabelsManager.EMPTY_STRING_SET)); - } catch (IOException e) { - assertNodeLabelsDisabledErrorMessage(e); - caught = true; - } - // check exception caught - Assert.assertTrue(caught); - caught = false; - - // remove labels from node - try { - mgr.removeLabelsFromNode(ImmutableMap.of(NodeId.newInstance("host", 0), - CommonNodeLabelsManager.EMPTY_STRING_SET)); - } catch (IOException e) { - assertNodeLabelsDisabledErrorMessage(e); - caught = true; - } - // check exception caught - Assert.assertTrue(caught); - caught = false; - - // replace labels on node - try { - mgr.replaceLabelsOnNode(ImmutableMap.of(NodeId.newInstance("host", 0), - CommonNodeLabelsManager.EMPTY_STRING_SET)); - } catch (IOException e) { - assertNodeLabelsDisabledErrorMessage(e); - caught = true; - } - // check exception caught - Assert.assertTrue(caught); - caught = false; - - mgr.close(); - } - - @Test(timeout = 5000) - public void testLabelsToNodes() - throws IOException { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - Map> labelsToNodes = mgr.getLabelsToNodes(); - assertLabelsToNodesEquals( - labelsToNodes, - ImmutableMap.of( - "p1", toSet(toNodeId("n1")))); - assertLabelsToNodesEquals( - labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); - - // Replace labels on n1:1 to P2 - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"), - toNodeId("n1:2"), toSet("p2"))); - labelsToNodes = mgr.getLabelsToNodes(); - assertLabelsToNodesEquals( - labelsToNodes, - ImmutableMap.of( - "p1", toSet(toNodeId("n1")), - "p2", toSet(toNodeId("n1:1"),toNodeId("n1:2")))); - assertLabelsToNodesEquals( - labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); - - // Replace labels on n1 to P1, both n1:1/n1 will be P1 now - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - labelsToNodes = mgr.getLabelsToNodes(); - assertLabelsToNodesEquals( - labelsToNodes, - ImmutableMap.of( - "p1", toSet(toNodeId("n1"),toNodeId("n1:1"),toNodeId("n1:2")))); - assertLabelsToNodesEquals( - labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); - - // Set labels on n1:1 to P2 again to verify if add/remove works - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"))); - // Add p3 to n1, should makes n1:1 to be p2/p3, and n1:2 to be p1/p3 - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); - labelsToNodes = mgr.getLabelsToNodes(); - assertLabelsToNodesEquals( - labelsToNodes, - ImmutableMap.of( - "p1", toSet(toNodeId("n1"),toNodeId("n1:2")), - "p2", toSet(toNodeId("n1:1")), - "p3", toSet(toNodeId("n2")))); - assertLabelsToNodesEquals( - labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); - - // Remove P3 from n1, should makes 
n1:1 to be p2, and n1:2 to be p1 - mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); - labelsToNodes = mgr.getLabelsToNodes(); - assertLabelsToNodesEquals( - labelsToNodes, - ImmutableMap.of( - "p1", toSet(toNodeId("n1"),toNodeId("n1:2")), - "p2", toSet(toNodeId("n1:1")))); - assertLabelsToNodesEquals( - labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + @Test + void testNodeLabelsDisabled() throws IOException { + assertTimeout(ofSeconds(5), () -> { + DummyCommonNodeLabelsManager mgr = new DummyCommonNodeLabelsManager(); + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, false); + mgr.init(conf); + mgr.start(); + boolean caught = false; + + // add labels + try { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x")); + } catch (IOException e) { + assertNodeLabelsDisabledErrorMessage(e); + caught = true; + } + // check exception caught + assertTrue(caught); + caught = false; + + // remove labels + try { + mgr.removeFromClusterNodeLabels(ImmutableSet.of("x")); + } catch (IOException e) { + assertNodeLabelsDisabledErrorMessage(e); + caught = true; + } + // check exception caught + assertTrue(caught); + caught = false; + + // add labels to node + try { + mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("host", 0), + CommonNodeLabelsManager.EMPTY_STRING_SET)); + } catch (IOException e) { + assertNodeLabelsDisabledErrorMessage(e); + caught = true; + } + // check exception caught + assertTrue(caught); + caught = false; + + // remove labels from node + try { + mgr.removeLabelsFromNode(ImmutableMap.of(NodeId.newInstance("host", 0), + CommonNodeLabelsManager.EMPTY_STRING_SET)); + } catch (IOException e) { + assertNodeLabelsDisabledErrorMessage(e); + caught = true; + } + // check exception caught + assertTrue(caught); + caught = false; + + // replace labels on node + try { + mgr.replaceLabelsOnNode(ImmutableMap.of(NodeId.newInstance("host", 0), + CommonNodeLabelsManager.EMPTY_STRING_SET)); + } catch (IOException e) { + assertNodeLabelsDisabledErrorMessage(e); + caught = true; + } + // check exception caught + assertTrue(caught); + caught = false; + + mgr.close(); + }); } - @Test(timeout = 5000) - public void testLabelsToNodesForSelectedLabels() - throws IOException { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addLabelsToNode( - ImmutableMap.of( - toNodeId("n1:1"), toSet("p1"), - toNodeId("n1:2"), toSet("p2"))); - Set setlabels = - new HashSet(Arrays.asList(new String[]{"p1"})); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(setlabels), - ImmutableMap.of("p1", toSet(toNodeId("n1:1")))); - - // Replace labels on n1:1 to P3 - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p3"))); - assertTrue(mgr.getLabelsToNodes(setlabels).isEmpty()); - setlabels = new HashSet(Arrays.asList(new String[]{"p2", "p3"})); - assertLabelsToNodesEquals( - mgr.getLabelsToNodes(setlabels), - ImmutableMap.of( - "p3", toSet(toNodeId("n1"), toNodeId("n1:1"),toNodeId("n1:2")))); - - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p2"))); - assertLabelsToNodesEquals( - mgr.getLabelsToNodes(setlabels), - ImmutableMap.of( - "p2", toSet(toNodeId("n2")), - "p3", toSet(toNodeId("n1"), toNodeId("n1:1"),toNodeId("n1:2")))); - - mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p3"))); - setlabels = - new HashSet(Arrays.asList(new String[]{"p1", "p2", "p3"})); - assertLabelsToNodesEquals( - mgr.getLabelsToNodes(setlabels), - ImmutableMap.of( - "p2", 
toSet(toNodeId("n2")))); - - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n3"), toSet("p1"))); - assertLabelsToNodesEquals( - mgr.getLabelsToNodes(setlabels), - ImmutableMap.of( - "p1", toSet(toNodeId("n3")), - "p2", toSet(toNodeId("n2")))); - - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2:2"), toSet("p3"))); - assertLabelsToNodesEquals( - mgr.getLabelsToNodes(setlabels), - ImmutableMap.of( - "p1", toSet(toNodeId("n3")), - "p2", toSet(toNodeId("n2")), - "p3", toSet(toNodeId("n2:2")))); - setlabels = new HashSet(Arrays.asList(new String[]{"p1"})); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(setlabels), - ImmutableMap.of("p1", toSet(toNodeId("n3")))); + @Test + void testLabelsToNodes() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + Map> labelsToNodes = mgr.getLabelsToNodes(); + assertLabelsToNodesEquals( + labelsToNodes, + ImmutableMap.of( + "p1", toSet(toNodeId("n1")))); + assertLabelsToNodesEquals( + labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + + // Replace labels on n1:1 to P2 + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"), + toNodeId("n1:2"), toSet("p2"))); + labelsToNodes = mgr.getLabelsToNodes(); + assertLabelsToNodesEquals( + labelsToNodes, + ImmutableMap.of( + "p1", toSet(toNodeId("n1")), + "p2", toSet(toNodeId("n1:1"), toNodeId("n1:2")))); + assertLabelsToNodesEquals( + labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + + // Replace labels on n1 to P1, both n1:1/n1 will be P1 now + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + labelsToNodes = mgr.getLabelsToNodes(); + assertLabelsToNodesEquals( + labelsToNodes, + ImmutableMap.of( + "p1", toSet(toNodeId("n1"), toNodeId("n1:1"), toNodeId("n1:2")))); + assertLabelsToNodesEquals( + labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + + // Set labels on n1:1 to P2 again to verify if add/remove works + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p2"))); + // Add p3 to n1, should makes n1:1 to be p2/p3, and n1:2 to be p1/p3 + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); + labelsToNodes = mgr.getLabelsToNodes(); + assertLabelsToNodesEquals( + labelsToNodes, + ImmutableMap.of( + "p1", toSet(toNodeId("n1"), toNodeId("n1:2")), + "p2", toSet(toNodeId("n1:1")), + "p3", toSet(toNodeId("n2")))); + assertLabelsToNodesEquals( + labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + + // Remove P3 from n1, should makes n1:1 to be p2, and n1:2 to be p1 + mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); + labelsToNodes = mgr.getLabelsToNodes(); + assertLabelsToNodesEquals( + labelsToNodes, + ImmutableMap.of( + "p1", toSet(toNodeId("n1"), toNodeId("n1:2")), + "p2", toSet(toNodeId("n1:1")))); + assertLabelsToNodesEquals( + labelsToNodes, transposeNodeToLabels(mgr.getNodeLabels())); + }); } - @Test(timeout = 5000) - public void testNoMoreThanOneLabelExistedInOneHost() throws IOException { - boolean failed = false; - // As in YARN-2694, we temporarily disable no more than one label existed in - // one host - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - try { - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2"))); - } catch (IOException e) { - failed = true; - } - Assert.assertTrue("Should failed when set > 1 labels on a host", failed); + @Test + void 
testLabelsToNodesForSelectedLabels() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addLabelsToNode( + ImmutableMap.of( + toNodeId("n1:1"), toSet("p1"), + toNodeId("n1:2"), toSet("p2"))); + Set setlabels = + new HashSet(Arrays.asList(new String[]{"p1"})); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(setlabels), + ImmutableMap.of("p1", toSet(toNodeId("n1:1")))); + + // Replace labels on n1:1 to P3 + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p3"))); + assertTrue(mgr.getLabelsToNodes(setlabels).isEmpty()); + setlabels = new HashSet(Arrays.asList(new String[]{"p2", "p3"})); + assertLabelsToNodesEquals( + mgr.getLabelsToNodes(setlabels), + ImmutableMap.of( + "p3", toSet(toNodeId("n1"), toNodeId("n1:1"), toNodeId("n1:2")))); + + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p2"))); + assertLabelsToNodesEquals( + mgr.getLabelsToNodes(setlabels), + ImmutableMap.of( + "p2", toSet(toNodeId("n2")), + "p3", toSet(toNodeId("n1"), toNodeId("n1:1"), toNodeId("n1:2")))); + + mgr.removeLabelsFromNode(ImmutableMap.of(toNodeId("n1"), toSet("p3"))); + setlabels = + new HashSet(Arrays.asList(new String[]{"p1", "p2", "p3"})); + assertLabelsToNodesEquals( + mgr.getLabelsToNodes(setlabels), + ImmutableMap.of( + "p2", toSet(toNodeId("n2")))); + + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n3"), toSet("p1"))); + assertLabelsToNodesEquals( + mgr.getLabelsToNodes(setlabels), + ImmutableMap.of( + "p1", toSet(toNodeId("n3")), + "p2", toSet(toNodeId("n2")))); + + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2:2"), toSet("p3"))); + assertLabelsToNodesEquals( + mgr.getLabelsToNodes(setlabels), + ImmutableMap.of( + "p1", toSet(toNodeId("n3")), + "p2", toSet(toNodeId("n2")), + "p3", toSet(toNodeId("n2:2")))); + setlabels = new HashSet(Arrays.asList(new String[]{"p1"})); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(setlabels), + ImmutableMap.of("p1", toSet(toNodeId("n3")))); + }); + } - try { - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2"))); - } catch (IOException e) { - failed = true; - } - Assert.assertTrue("Should failed when add > 1 labels on a host", failed); + @Test + void testNoMoreThanOneLabelExistedInOneHost() throws IOException { + assertTimeout(ofSeconds(5), () -> { + boolean failed = false; + // As in YARN-2694, we temporarily disable no more than one label existed in + // one host + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + try { + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2"))); + } catch (IOException e) { + failed = true; + } + assertTrue(failed, "Should failed when set > 1 labels on a host"); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - // add a same label to a node, #labels in this node is still 1, shouldn't - // fail - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - try { - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); - } catch (IOException e) { - failed = true; - } - Assert.assertTrue("Should failed when #labels > 1 on a host after add", - failed); + try { + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2"))); + } catch (IOException e) { + failed = true; + } + assertTrue(failed, "Should failed when add > 1 labels on a host"); + + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + // add a same label to a node, #labels in this node is still 1, shouldn't + // fail + 
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + try { + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); + } catch (IOException e) { + failed = true; + } + assertTrue(failed, "Should failed when #labels > 1 on a host after add"); + }); } private void verifyNodeLabelAdded(Set expectedAddedLabelNames, Collection addedNodeLabels) { - Assert.assertEquals(expectedAddedLabelNames.size(), addedNodeLabels.size()); + assertEquals(expectedAddedLabelNames.size(), addedNodeLabels.size()); for (NodeLabel label : addedNodeLabels) { - Assert.assertTrue(expectedAddedLabelNames.contains(label.getName())); + assertTrue(expectedAddedLabelNames.contains(label.getName())); } } - @Test(timeout = 5000) - public void testReplaceLabelsOnNodeInDistributedMode() throws Exception { - //create new DummyCommonNodeLabelsManager than the one got from @before - mgr.stop(); - mgr = new DummyCommonNodeLabelsManager(); - Configuration conf = new YarnConfiguration(); - conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true); - conf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, - YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); - - mgr.init(conf); - mgr.start(); - - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - Set labelsByNode = mgr.getLabelsByNode(toNodeId("n1")); - - Assert.assertNull( - "Labels are not expected to be written to the NodeLabelStore", - mgr.lastNodeToLabels); - Assert.assertNotNull("Updated labels should be available from the Mgr", - labelsByNode); - Assert.assertTrue(labelsByNode.contains("p1")); + @Test + void testReplaceLabelsOnNodeInDistributedMode() throws Exception { + assertTimeout(ofSeconds(5), () -> { + //create new DummyCommonNodeLabelsManager than the one got from @before + mgr.stop(); + mgr = new DummyCommonNodeLabelsManager(); + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true); + conf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, + YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); + + mgr.init(conf); + mgr.start(); + + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + Set labelsByNode = mgr.getLabelsByNode(toNodeId("n1")); + + assertNull(mgr.lastNodeToLabels, + "Labels are not expected to be written to the NodeLabelStore"); + assertNotNull(labelsByNode, + "Updated labels should be available from the Mgr"); + assertTrue(labelsByNode.contains("p1")); + }); } - @Test(timeout = 5000) - public void testLabelsInfoToNodes() throws IOException { - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", false), - NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", true))); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); - Map> labelsToNodes = mgr.getLabelsInfoToNodes(); - assertLabelsInfoToNodesEquals(labelsToNodes, ImmutableMap.of( - NodeLabel.newInstance("p1", false), toSet(toNodeId("n1")))); + @Test + void testLabelsInfoToNodes() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", false), + NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", true))); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"))); + Map> labelsToNodes = mgr.getLabelsInfoToNodes(); + assertLabelsInfoToNodesEquals(labelsToNodes, ImmutableMap.of( + 
NodeLabel.newInstance("p1", false), toSet(toNodeId("n1")))); + }); } - @Test(timeout = 5000) - public void testGetNodeLabelsInfo() throws IOException { - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", false), - NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", false))); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); - mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); + @Test + void testGetNodeLabelsInfo() throws IOException { + assertTimeout(ofSeconds(5), () -> { + mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", false), + NodeLabel.newInstance("p2", true), NodeLabel.newInstance("p3", false))); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p2"))); + mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n2"), toSet("p3"))); - assertLabelInfoMapEquals(mgr.getNodeLabelsInfo(), ImmutableMap.of( - toNodeId("n1"), toSet(NodeLabel.newInstance("p2", true)), - toNodeId("n2"), toSet(NodeLabel.newInstance("p3", false)))); + assertLabelInfoMapEquals(mgr.getNodeLabelsInfo(), ImmutableMap.of( + toNodeId("n1"), toSet(NodeLabel.newInstance("p2", true)), + toNodeId("n2"), toSet(NodeLabel.newInstance("p3", false)))); + }); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java index ed2f4aa6c74..823ebd00310 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java @@ -21,8 +21,8 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; -import java.util.Collection; import java.util.Map; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -30,17 +30,23 @@ import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.InlineDispatcher; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; import org.mockito.Mockito; import com.google.common.collect.ImmutableMap; -@RunWith(Parameterized.class) +import static java.time.Duration.ofSeconds; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTimeout; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase { MockNodeLabelManager mgr = null; Configuration conf = null; @@ -67,17 +73,23 @@ protected void stopDispatcher() { public TestFileSystemNodeLabelsStore(String className) { this.storeClassName = className; } - - @Parameterized.Parameters - public static Collection 
getParameters() { - return Arrays.asList( - new String[][] { { FileSystemNodeLabelsStore.class.getCanonicalName() }, - { NonAppendableFSNodeLabelStore.class.getCanonicalName() } }); + + static class NodeLabelsStoreProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) { + return Stream.of( + FileSystemNodeLabelsStore.class.getCanonicalName(), + NonAppendableFSNodeLabelStore.class.getCanonicalName()) + .map(Arguments::of); + } } - @Before - public void before() throws IOException { + @BeforeEach + void before() throws IOException { mgr = new MockNodeLabelManager(); + } + + private void setup(String storeClassName) throws IOException { conf = new Configuration(); conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true); conf.set(YarnConfiguration.FS_NODE_LABELS_STORE_IMPL_CLASS, storeClassName); @@ -91,8 +103,8 @@ public void before() throws IOException { mgr.start(); } - @After - public void after() throws IOException { + @AfterEach + void after() throws IOException { if (mgr.store instanceof FileSystemNodeLabelsStore) { FileSystemNodeLabelsStore fsStore = ((FileSystemNodeLabelsStore) mgr.store); @@ -102,164 +114,181 @@ public void after() throws IOException { } @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(timeout = 10000) - public void testRecoverWithMirror() throws Exception { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"), - toNodeId("n2"), toSet("p2"))); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), - toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), - toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); + @ParameterizedTest + @ArgumentsSource(NodeLabelsStoreProvider.class) + void testRecoverWithMirror(String storeClassName) throws Exception { + assertTimeout(ofSeconds(10), () -> { + setup(storeClassName); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"), + toNodeId("n2"), toSet("p2"))); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), + toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), + toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); /* * node -> partition p1: n1 p2: n2 p3: n3 p4: n4 p5: n5 p6: n6, n7 */ - mgr.removeFromClusterNodeLabels(toSet("p1")); - mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); + mgr.removeFromClusterNodeLabels(toSet("p1")); + mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); /* * After removed p2: n2 p4: n4 p6: n6, n7 */ - // shutdown mgr and start a new mgr - mgr.stop(); - - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - - // check variables - Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6"))); - - assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), - toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), - toNodeId("n7"), toSet("p6"))); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(), - ImmutableMap.of( - "p6", toSet(toNodeId("n6"), 
toNodeId("n7")), - "p4", toSet(toNodeId("n4")), - "p2", toSet(toNodeId("n2")))); - - // stutdown mgr and start a new mgr - mgr.stop(); - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - - // check variables - Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6"))); - - assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), - toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), - toNodeId("n7"), toSet("p6"))); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(), - ImmutableMap.of( - "p6", toSet(toNodeId("n6"), toNodeId("n7")), - "p4", toSet(toNodeId("n4")), - "p2", toSet(toNodeId("n2")))); - mgr.stop(); + // shutdown mgr and start a new mgr + mgr.stop(); + + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + + // check variables + assertEquals(3, mgr.getClusterNodeLabelNames().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6"))); + + assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), + toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), + toNodeId("n7"), toSet("p6"))); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(), + ImmutableMap.of( + "p6", toSet(toNodeId("n6"), toNodeId("n7")), + "p4", toSet(toNodeId("n4")), + "p2", toSet(toNodeId("n2")))); + + // stutdown mgr and start a new mgr + mgr.stop(); + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + + // check variables + assertEquals(3, mgr.getClusterNodeLabelNames().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6"))); + + assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), + toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), + toNodeId("n7"), toSet("p6"))); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(), + ImmutableMap.of( + "p6", toSet(toNodeId("n6"), toNodeId("n7")), + "p4", toSet(toNodeId("n4")), + "p2", toSet(toNodeId("n2")))); + mgr.stop(); + }); } @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(timeout = 10000) - public void testRecoverWithDistributedNodeLabels() throws Exception { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"), - toNodeId("n2"), toSet("p2"))); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), - toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), - toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); - - mgr.removeFromClusterNodeLabels(toSet("p1")); - mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); - mgr.stop(); - - mgr = new MockNodeLabelManager(); - Configuration cf = new Configuration(conf); - cf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, - YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); - mgr.init(cf); - mgr.start(); - - // check variables - Assert.assertEquals(3, mgr.getClusterNodeLabels().size()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6"))); - - Assert.assertTrue("During recovery in distributed node-labels setup, " - + "node to labels mapping should not be recovered ", mgr - .getNodeLabels().size() == 0); - - mgr.stop(); + @ParameterizedTest + 
@ArgumentsSource(NodeLabelsStoreProvider.class) + void testRecoverWithDistributedNodeLabels(String storeClassName) + throws Exception { + assertTimeout(ofSeconds(10), () -> { + setup(storeClassName); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"), + toNodeId("n2"), toSet("p2"))); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), + toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), + toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); + + mgr.removeFromClusterNodeLabels(toSet("p1")); + mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); + mgr.stop(); + + mgr = new MockNodeLabelManager(); + Configuration cf = new Configuration(conf); + cf.set(YarnConfiguration.NODELABEL_CONFIGURATION_TYPE, + YarnConfiguration.DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE); + mgr.init(cf); + mgr.start(); + + // check variables + assertEquals(3, mgr.getClusterNodeLabels().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6"))); + + assertEquals(0, mgr.getNodeLabels().size(), + "During recovery in distributed node-labels setup, " + + "node to labels mapping should not be recovered "); + + mgr.stop(); + }); } @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(timeout = 10000) - public void testEditlogRecover() throws Exception { - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), - toNodeId("n2"), toSet("p2"))); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), - toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), - toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); + @ParameterizedTest + @ArgumentsSource(NodeLabelsStoreProvider.class) + void testEditlogRecover(String storeClassName) throws Exception { + assertTimeout(ofSeconds(10), () -> { + setup(storeClassName); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4")); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6")); + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), + toNodeId("n2"), toSet("p2"))); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), + toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), + toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); /* * node -> partition p1: n1 p2: n2 p3: n3 p4: n4 p5: n5 p6: n6, n7 */ - mgr.removeFromClusterNodeLabels(toSet("p1")); - mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); + mgr.removeFromClusterNodeLabels(toSet("p1")); + mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); /* * After removed p2: n2 p4: n4 p6: n6, n7 */ - // shutdown mgr and start a new mgr - mgr.stop(); - - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - - // check variables - Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6"))); - - assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), - toSet("p2"), toNodeId("n4"), 
toSet("p4"), toNodeId("n6"), toSet("p6"), - toNodeId("n7"), toSet("p6"))); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(), - ImmutableMap.of( - "p6", toSet(toNodeId("n6"), toNodeId("n7")), - "p4", toSet(toNodeId("n4")), - "p2", toSet(toNodeId("n2")))); - mgr.stop(); + // shutdown mgr and start a new mgr + mgr.stop(); + + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + + // check variables + assertEquals(3, mgr.getClusterNodeLabelNames().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6"))); + + assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), + toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), + toNodeId("n7"), toSet("p6"))); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(), + ImmutableMap.of( + "p6", toSet(toNodeId("n6"), toNodeId("n7")), + "p4", toSet(toNodeId("n4")), + "p2", toSet(toNodeId("n2")))); + mgr.stop(); + }); } @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test (timeout = 10000) - public void testSerilizationAfterRecovery() throws Exception { - // Add to cluster node labels, p2/p6 are non-exclusive. - mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", true), - NodeLabel.newInstance("p2", false), NodeLabel.newInstance("p3", true), - NodeLabel.newInstance("p4", true), NodeLabel.newInstance("p5", true), - NodeLabel.newInstance("p6", false))); - - mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), - toNodeId("n2"), toSet("p2"))); - mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), - toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), - toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); + @ParameterizedTest + @ArgumentsSource(NodeLabelsStoreProvider.class) + void testSerilizationAfterRecovery(String storeClassName) + throws Exception { + assertTimeout(ofSeconds(10), () -> { + setup(storeClassName); + // Add to cluster node labels, p2/p6 are non-exclusive. 
+ mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", true), + NodeLabel.newInstance("p2", false), NodeLabel.newInstance("p3", true), + NodeLabel.newInstance("p4", true), NodeLabel.newInstance("p5", true), + NodeLabel.newInstance("p6", false))); + + mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"), + toNodeId("n2"), toSet("p2"))); + mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"), + toNodeId("n4"), toSet("p4"), toNodeId("n5"), toSet("p5"), + toNodeId("n6"), toSet("p6"), toNodeId("n7"), toSet("p6"))); /* * node -> labels @@ -271,8 +300,8 @@ public void testSerilizationAfterRecovery() throws Exception { * p6: n6, n7 */ - mgr.removeFromClusterNodeLabels(toSet("p1")); - mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); + mgr.removeFromClusterNodeLabels(toSet("p1")); + mgr.removeFromClusterNodeLabels(Arrays.asList("p3", "p5")); /* * After removed @@ -281,65 +310,68 @@ public void testSerilizationAfterRecovery() throws Exception { * p6: n6, n7 */ - // shutdown mgr and start a new mgr - mgr.stop(); + // shutdown mgr and start a new mgr + mgr.stop(); - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); - // check variables - Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size()); - Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6"))); + // check variables + assertEquals(3, mgr.getClusterNodeLabelNames().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6"))); - assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), - toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), - toNodeId("n7"), toSet("p6"))); - assertLabelsToNodesEquals(mgr.getLabelsToNodes(), - ImmutableMap.of( - "p6", toSet(toNodeId("n6"), toNodeId("n7")), - "p4", toSet(toNodeId("n4")), - "p2", toSet(toNodeId("n2")))); + assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"), + toSet("p2"), toNodeId("n4"), toSet("p4"), toNodeId("n6"), toSet("p6"), + toNodeId("n7"), toSet("p6"))); + assertLabelsToNodesEquals(mgr.getLabelsToNodes(), + ImmutableMap.of( + "p6", toSet(toNodeId("n6"), toNodeId("n7")), + "p4", toSet(toNodeId("n4")), + "p2", toSet(toNodeId("n2")))); - Assert.assertFalse(mgr.isExclusiveNodeLabel("p2")); - Assert.assertTrue(mgr.isExclusiveNodeLabel("p4")); - Assert.assertFalse(mgr.isExclusiveNodeLabel("p6")); + assertFalse(mgr.isExclusiveNodeLabel("p2")); + assertTrue(mgr.isExclusiveNodeLabel("p4")); + assertFalse(mgr.isExclusiveNodeLabel("p6")); /* * Add label p7,p8 then shutdown */ - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p7", "p8")); - mgr.stop(); + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p7", "p8")); + mgr.stop(); /* * Restart, add label p9 and shutdown */ - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p9")); - mgr.stop(); + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p9")); + mgr.stop(); /* * Recovery, and see if p9 added */ - mgr = new MockNodeLabelManager(); - mgr.init(conf); - mgr.start(); - - // check variables - Assert.assertEquals(6, mgr.getClusterNodeLabelNames().size()); - 
Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll( - Arrays.asList("p2", "p4", "p6", "p7", "p8", "p9"))); - mgr.stop(); + mgr = new MockNodeLabelManager(); + mgr.init(conf); + mgr.start(); + + // check variables + assertEquals(6, mgr.getClusterNodeLabelNames().size()); + assertTrue(mgr.getClusterNodeLabelNames().containsAll( + Arrays.asList("p2", "p4", "p6", "p7", "p8", "p9"))); + mgr.stop(); + }); } - @Test - public void testRootMkdirOnInitStore() throws Exception { + @ParameterizedTest + @ArgumentsSource(NodeLabelsStoreProvider.class) + void testRootMkdirOnInitStore(String storeClassName) throws Exception { + setup(storeClassName); final FileSystem mockFs = Mockito.mock(FileSystem.class); FileSystemNodeLabelsStore mockStore = new FileSystemNodeLabelsStore() { void setFileSystem(Configuration conf) throws IOException { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml index 5f8509764e8..3d7f74c3b7b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml @@ -90,6 +90,11 @@ test + org.junit.jupiter + junit-jupiter-api + test + + org.mockito mockito-all test diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/records/TestFederationProtocolRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/records/TestFederationProtocolRecords.java index cf8cf719d01..4f1d1465fc5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/records/TestFederationProtocolRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/store/records/TestFederationProtocolRecords.java @@ -76,16 +76,16 @@ import org.apache.hadoop.yarn.server.federation.store.records.impl.pb.UpdateApplicationHomeSubClusterRequestPBImpl; import org.apache.hadoop.yarn.server.federation.store.records.impl.pb.UpdateApplicationHomeSubClusterResponsePBImpl; import org.apache.hadoop.yarn.server.records.Version; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Test class for federation protocol records. 
*/ -public class TestFederationProtocolRecords extends BasePBImplRecordsTest { +class TestFederationProtocolRecords extends BasePBImplRecordsTest { - @BeforeClass - public static void setup() throws Exception { + @BeforeAll + static void setup() throws Exception { generateByNewInstance(ApplicationId.class); generateByNewInstance(Version.class); generateByNewInstance(SubClusterId.class); @@ -95,168 +95,168 @@ public static void setup() throws Exception { } @Test - public void testSubClusterId() throws Exception { + void testSubClusterId() throws Exception { validatePBImplRecord(SubClusterIdPBImpl.class, SubClusterIdProto.class); } @Test - public void testSubClusterInfo() throws Exception { + void testSubClusterInfo() throws Exception { validatePBImplRecord(SubClusterInfoPBImpl.class, SubClusterInfoProto.class); } @Test - public void testSubClusterRegisterRequest() throws Exception { + void testSubClusterRegisterRequest() throws Exception { validatePBImplRecord(SubClusterRegisterRequestPBImpl.class, SubClusterRegisterRequestProto.class); } @Test - public void testSubClusterRegisterResponse() throws Exception { + void testSubClusterRegisterResponse() throws Exception { validatePBImplRecord(SubClusterRegisterResponsePBImpl.class, SubClusterRegisterResponseProto.class); } @Test - public void testSubClusterDeregisterRequest() throws Exception { + void testSubClusterDeregisterRequest() throws Exception { validatePBImplRecord(SubClusterDeregisterRequestPBImpl.class, SubClusterDeregisterRequestProto.class); } @Test - public void testSubClusterDeregisterResponse() throws Exception { + void testSubClusterDeregisterResponse() throws Exception { validatePBImplRecord(SubClusterDeregisterResponsePBImpl.class, SubClusterDeregisterResponseProto.class); } @Test - public void testSubClusterHeartbeatRequest() throws Exception { + void testSubClusterHeartbeatRequest() throws Exception { validatePBImplRecord(SubClusterHeartbeatRequestPBImpl.class, SubClusterHeartbeatRequestProto.class); } @Test - public void testSubClusterHeartbeatResponse() throws Exception { + void testSubClusterHeartbeatResponse() throws Exception { validatePBImplRecord(SubClusterHeartbeatResponsePBImpl.class, SubClusterHeartbeatResponseProto.class); } @Test - public void testGetSubClusterRequest() throws Exception { + void testGetSubClusterRequest() throws Exception { validatePBImplRecord(GetSubClusterInfoRequestPBImpl.class, GetSubClusterInfoRequestProto.class); } @Test - public void testGetSubClusterResponse() throws Exception { + void testGetSubClusterResponse() throws Exception { validatePBImplRecord(GetSubClusterInfoResponsePBImpl.class, GetSubClusterInfoResponseProto.class); } @Test - public void testGetSubClustersInfoRequest() throws Exception { + void testGetSubClustersInfoRequest() throws Exception { validatePBImplRecord(GetSubClustersInfoRequestPBImpl.class, GetSubClustersInfoRequestProto.class); } @Test - public void testGetSubClustersInfoResponse() throws Exception { + void testGetSubClustersInfoResponse() throws Exception { validatePBImplRecord(GetSubClustersInfoResponsePBImpl.class, GetSubClustersInfoResponseProto.class); } @Test - public void testAddApplicationHomeSubClusterRequest() throws Exception { + void testAddApplicationHomeSubClusterRequest() throws Exception { validatePBImplRecord(AddApplicationHomeSubClusterRequestPBImpl.class, AddApplicationHomeSubClusterRequestProto.class); } @Test - public void testAddApplicationHomeSubClusterResponse() throws Exception { + void testAddApplicationHomeSubClusterResponse() throws 
Exception { validatePBImplRecord(AddApplicationHomeSubClusterResponsePBImpl.class, AddApplicationHomeSubClusterResponseProto.class); } @Test - public void testUpdateApplicationHomeSubClusterRequest() throws Exception { + void testUpdateApplicationHomeSubClusterRequest() throws Exception { validatePBImplRecord(UpdateApplicationHomeSubClusterRequestPBImpl.class, UpdateApplicationHomeSubClusterRequestProto.class); } @Test - public void testUpdateApplicationHomeSubClusterResponse() throws Exception { + void testUpdateApplicationHomeSubClusterResponse() throws Exception { validatePBImplRecord(UpdateApplicationHomeSubClusterResponsePBImpl.class, UpdateApplicationHomeSubClusterResponseProto.class); } @Test - public void testGetApplicationHomeSubClusterRequest() throws Exception { + void testGetApplicationHomeSubClusterRequest() throws Exception { validatePBImplRecord(GetApplicationHomeSubClusterRequestPBImpl.class, GetApplicationHomeSubClusterRequestProto.class); } @Test - public void testGetApplicationHomeSubClusterResponse() throws Exception { + void testGetApplicationHomeSubClusterResponse() throws Exception { validatePBImplRecord(GetApplicationHomeSubClusterResponsePBImpl.class, GetApplicationHomeSubClusterResponseProto.class); } @Test - public void testGetApplicationsHomeSubClusterRequest() throws Exception { + void testGetApplicationsHomeSubClusterRequest() throws Exception { validatePBImplRecord(GetApplicationsHomeSubClusterRequestPBImpl.class, GetApplicationsHomeSubClusterRequestProto.class); } @Test - public void testGetApplicationsHomeSubClusterResponse() throws Exception { + void testGetApplicationsHomeSubClusterResponse() throws Exception { validatePBImplRecord(GetApplicationsHomeSubClusterResponsePBImpl.class, GetApplicationsHomeSubClusterResponseProto.class); } @Test - public void testDeleteApplicationHomeSubClusterRequest() throws Exception { + void testDeleteApplicationHomeSubClusterRequest() throws Exception { validatePBImplRecord(DeleteApplicationHomeSubClusterRequestPBImpl.class, DeleteApplicationHomeSubClusterRequestProto.class); } @Test - public void testDeleteApplicationHomeSubClusterResponse() throws Exception { + void testDeleteApplicationHomeSubClusterResponse() throws Exception { validatePBImplRecord(DeleteApplicationHomeSubClusterResponsePBImpl.class, DeleteApplicationHomeSubClusterResponseProto.class); } @Test - public void testGetSubClusterPolicyConfigurationRequest() throws Exception { + void testGetSubClusterPolicyConfigurationRequest() throws Exception { validatePBImplRecord(GetSubClusterPolicyConfigurationRequestPBImpl.class, GetSubClusterPolicyConfigurationRequestProto.class); } @Test - public void testGetSubClusterPolicyConfigurationResponse() throws Exception { + void testGetSubClusterPolicyConfigurationResponse() throws Exception { validatePBImplRecord(GetSubClusterPolicyConfigurationResponsePBImpl.class, GetSubClusterPolicyConfigurationResponseProto.class); } @Test - public void testSetSubClusterPolicyConfigurationRequest() throws Exception { + void testSetSubClusterPolicyConfigurationRequest() throws Exception { validatePBImplRecord(SetSubClusterPolicyConfigurationRequestPBImpl.class, SetSubClusterPolicyConfigurationRequestProto.class); } @Test - public void testSetSubClusterPolicyConfigurationResponse() throws Exception { + void testSetSubClusterPolicyConfigurationResponse() throws Exception { validatePBImplRecord(SetSubClusterPolicyConfigurationResponsePBImpl.class, SetSubClusterPolicyConfigurationResponseProto.class); } @Test - public void 
testGetSubClusterPoliciesConfigurationsRequest() + void testGetSubClusterPoliciesConfigurationsRequest() throws Exception { validatePBImplRecord(GetSubClusterPoliciesConfigurationsRequestPBImpl.class, GetSubClusterPoliciesConfigurationsRequestProto.class); } @Test - public void testGetSubClusterPoliciesConfigurationsResponse() + void testGetSubClusterPoliciesConfigurationsResponse() throws Exception { validatePBImplRecord( GetSubClusterPoliciesConfigurationsResponsePBImpl.class,