diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 1d9fc166c745a..91fe4b91f3414 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -219,6 +219,11 @@
       <artifactId>mockito-inline</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummySharedResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummySharedResource.java
index a7cf41dd997ec..a2fbd15f5c36d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummySharedResource.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummySharedResource.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.ha;
 
-import org.junit.Assert;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * A fake shared resource, for use in automatic failover testing.
@@ -47,6 +47,6 @@ public synchronized void release(DummyHAService oldHolder) {
   }
 
   public synchronized void assertNoViolations() {
-    Assert.assertEquals(0, violations);
+    assertEquals(0, violations);
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
index 8d3075f45263b..1f5bf0779fdeb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.ha;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
index e8c57f1efd717..b63eb3089d05a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
@@ -36,9 +36,8 @@
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.ZooDefs.Ids;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.Assert;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 
@@ -49,7 +48,13 @@
 import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import static org.mockito.ArgumentMatchers.any;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.Mockito.any;
 
 public class TestActiveStandbyElector {
 
@@ -92,7 +97,7 @@ protected void sleepFor(int ms) {
   private static final String ZK_BREADCRUMB_NAME = ZK_PARENT_NAME + "/" +
ActiveStandbyElector.BREADCRUMB_FILENAME; - @Before + @BeforeEach public void init() throws IOException, KeeperException { count = 0; mockZK = Mockito.mock(ZooKeeper.class); @@ -123,9 +128,10 @@ private void mockPriorActive(byte[] data) throws Exception { /** * verify that joinElection checks for null data */ - @Test(expected = HadoopIllegalArgumentException.class) + @Test public void testJoinElectionException() { - elector.joinElection(null); + assertThrows(HadoopIllegalArgumentException.class, + () -> elector.joinElection(null)); } /** @@ -177,7 +183,7 @@ public void testCreateNodeResultBecomeActive() throws Exception { public void testFailToBecomeActive() throws Exception { mockNoPriorActive(); elector.joinElection(data); - Assert.assertEquals(0, elector.sleptFor); + assertEquals(0, elector.sleptFor); Mockito.doThrow(new ServiceFailedException("failed to become active")) .when(mockApp).becomeActive(); @@ -189,8 +195,8 @@ public void testFailToBecomeActive() throws Exception { // should re-join Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK); - Assert.assertEquals(2, count); - Assert.assertTrue(elector.sleptFor > 0); + assertEquals(2, count); + assertTrue(elector.sleptFor > 0); } /** @@ -202,7 +208,7 @@ public void testFailToBecomeActive() throws Exception { public void testFailToBecomeActiveAfterZKDisconnect() throws Exception { mockNoPriorActive(); elector.joinElection(data); - Assert.assertEquals(0, elector.sleptFor); + assertEquals(0, elector.sleptFor); elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME, mockZK, ZK_LOCK_NAME); @@ -226,8 +232,8 @@ public void testFailToBecomeActiveAfterZKDisconnect() throws Exception { // should re-join Mockito.verify(mockZK, Mockito.times(3)).create(ZK_LOCK_NAME, data, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK); - Assert.assertEquals(2, count); - Assert.assertTrue(elector.sleptFor > 0); + assertEquals(2, count); + assertTrue(elector.sleptFor > 0); } @@ -331,7 +337,7 @@ public void testCreateNodeResultRetryBecomeActive() throws Exception { elector.joinElection(data); // recreate connection via getNewZooKeeper - Assert.assertEquals(2, count); + assertEquals(2, count); elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME, mockZK, ZK_LOCK_NAME); elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK, @@ -457,10 +463,10 @@ public void testProcessCallbackEventNone() throws Exception { Event.KeeperState.SyncConnected); elector.processWatchEvent(mockZK, mockEvent); verifyExistCall(1); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); elector.processResult(Code.SESSIONEXPIRED.intValue(), ZK_LOCK_NAME, mockZK, new Stat()); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); // session expired should enter safe mode and initiate re-election // re-election checked via checking re-creation of new zookeeper and @@ -471,7 +477,7 @@ public void testProcessCallbackEventNone() throws Exception { Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode(); // called getNewZooKeeper to create new session. 
first call was in // constructor - Assert.assertEquals(2, count); + assertEquals(2, count); // once in initial joinElection and one now Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data, Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK); @@ -504,13 +510,13 @@ public void testProcessCallbackEventNode() throws Exception { ZK_LOCK_NAME); Mockito.verify(mockApp, Mockito.times(1)).becomeStandby(); verifyExistCall(1); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); Stat stat = new Stat(); stat.setEphemeralOwner(0L); Mockito.when(mockZK.getSessionId()).thenReturn(1L); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class); Mockito.when(mockEvent.getPath()).thenReturn(ZK_LOCK_NAME); @@ -520,18 +526,18 @@ public void testProcessCallbackEventNode() throws Exception { Event.EventType.NodeDataChanged); elector.processWatchEvent(mockZK, mockEvent); verifyExistCall(2); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); // monitoring should be setup again after event is received Mockito.when(mockEvent.getType()).thenReturn( Event.EventType.NodeChildrenChanged); elector.processWatchEvent(mockZK, mockEvent); verifyExistCall(3); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); // lock node deletion when in standby mode should create znode again // successful znode creation enters active state and sets monitor @@ -546,10 +552,10 @@ public void testProcessCallbackEventNode() throws Exception { ZK_LOCK_NAME); Mockito.verify(mockApp, Mockito.times(1)).becomeActive(); verifyExistCall(4); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); stat.setEphemeralOwner(1L); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); // lock node deletion in active mode should enter neutral mode and create // znode again successful znode creation enters active state and sets @@ -564,9 +570,9 @@ public void testProcessCallbackEventNode() throws Exception { ZK_LOCK_NAME); Mockito.verify(mockApp, Mockito.times(2)).becomeActive(); verifyExistCall(5); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); // bad path name results in fatal error Mockito.when(mockEvent.getPath()).thenReturn(null); @@ -574,7 +580,7 @@ public void testProcessCallbackEventNode() throws Exception { Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError( "Unexpected watch error from Zookeeper"); // fatal error means no new connection other than one from constructor - Assert.assertEquals(1, count); + assertEquals(1, 
count); // no new watches after fatal error verifyExistCall(5); @@ -599,13 +605,13 @@ public void testSuccessiveStandbyCalls() { ZK_LOCK_NAME); Mockito.verify(mockApp, Mockito.times(1)).becomeStandby(); verifyExistCall(1); - Assert.assertTrue(elector.isMonitorLockNodePending()); + assertTrue(elector.isMonitorLockNodePending()); Stat stat = new Stat(); stat.setEphemeralOwner(0L); Mockito.when(mockZK.getSessionId()).thenReturn(1L); elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat); - Assert.assertFalse(elector.isMonitorLockNodePending()); + assertFalse(elector.isMonitorLockNodePending()); WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class); Mockito.when(mockEvent.getPath()).thenReturn(ZK_LOCK_NAME); @@ -644,7 +650,7 @@ public void testQuitElection() throws Exception { byte[] data = new byte[8]; elector.joinElection(data); // getNewZooKeeper called 2 times. once in constructor and once now - Assert.assertEquals(2, count); + assertEquals(2, count); elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK, ZK_LOCK_NAME); Mockito.verify(mockApp, Mockito.times(1)).becomeStandby(); @@ -669,7 +675,7 @@ public void testGetActiveData() throws ActiveNotFoundException, Mockito.when( mockZK.getData(Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any())).thenReturn(data); - Assert.assertEquals(data, elector.getActiveData()); + assertEquals(data, elector.getActiveData()); Mockito.verify(mockZK, Mockito.times(1)).getData( Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any()); @@ -680,7 +686,7 @@ public void testGetActiveData() throws ActiveNotFoundException, new KeeperException.NoNodeException()); try { elector.getActiveData(); - Assert.fail("ActiveNotFoundException expected"); + fail("ActiveNotFoundException expected"); } catch(ActiveNotFoundException e) { Mockito.verify(mockZK, Mockito.times(2)).getData( Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any()); @@ -693,7 +699,7 @@ public void testGetActiveData() throws ActiveNotFoundException, any())).thenThrow( new KeeperException.AuthFailedException()); elector.getActiveData(); - Assert.fail("KeeperException.AuthFailedException expected"); + fail("KeeperException.AuthFailedException expected"); } catch(KeeperException.AuthFailedException ke) { Mockito.verify(mockZK, Mockito.times(3)).getData( Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any()); @@ -762,7 +768,7 @@ public void testEnsureBaseNodeFails() throws Exception { Mockito.eq(Ids.OPEN_ACL_UNSAFE), Mockito.eq(CreateMode.PERSISTENT)); try { elector.ensureParentZNode(); - Assert.fail("Did not throw!"); + fail("Did not throw!"); } catch (IOException ioe) { if (!(ioe.getCause() instanceof KeeperException.ConnectionLossException)) { throw ioe; @@ -791,7 +797,7 @@ protected ZooKeeper createZooKeeper() throws IOException { }; - Assert.fail("Did not throw zookeeper connection loss exceptions!"); + fail("Did not throw zookeeper connection loss exceptions!"); } catch (KeeperException ke) { GenericTestUtils.assertExceptionContains( "ConnectionLoss", ke); } @@ -842,14 +848,14 @@ protected synchronized ZooKeeper connectToZooKeeper() { = ArgumentCaptor.forClass(ZKClientConfig.class); Mockito.verify(e).initiateZookeeper(configArgumentCaptor.capture()); ZKClientConfig clientConfig = configArgumentCaptor.getValue(); - Assert.assertEquals(defaultConfig.getProperty(ZKClientConfig.SECURE_CLIENT), - clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT)); - Assert.assertEquals(defaultConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET), - 
clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET)); - Assert.assertNull(clientConfig.getProperty(clientX509Util.getSslKeystoreLocationProperty())); - Assert.assertNull(clientConfig.getProperty(clientX509Util.getSslKeystorePasswdProperty())); - Assert.assertNull(clientConfig.getProperty(clientX509Util.getSslTruststoreLocationProperty())); - Assert.assertNull(clientConfig.getProperty(clientX509Util.getSslTruststorePasswdProperty())); + assertEquals(defaultConfig.getProperty(ZKClientConfig.SECURE_CLIENT), + clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT)); + assertEquals(defaultConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET), + clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET)); + assertNull(clientConfig.getProperty(clientX509Util.getSslKeystoreLocationProperty())); + assertNull(clientConfig.getProperty(clientX509Util.getSslKeystorePasswdProperty())); + assertNull(clientConfig.getProperty(clientX509Util.getSslTruststoreLocationProperty())); + assertNull(clientConfig.getProperty(clientX509Util.getSslTruststorePasswdProperty())); } /** @@ -882,17 +888,17 @@ protected synchronized ZooKeeper connectToZooKeeper() { = ArgumentCaptor.forClass(ZKClientConfig.class); Mockito.verify(e).initiateZookeeper(configArgumentCaptor.capture()); ZKClientConfig clientConfig = configArgumentCaptor.getValue(); - Assert.assertEquals("true", clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT)); - Assert.assertEquals("org.apache.zookeeper.ClientCnxnSocketNetty", - clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET)); - Assert.assertEquals("keystore_location", - clientConfig.getProperty(clientX509Util.getSslKeystoreLocationProperty())); - Assert.assertEquals("keystore_password", - clientConfig.getProperty(clientX509Util.getSslKeystorePasswdProperty())); - Assert.assertEquals("truststore_location", - clientConfig.getProperty(clientX509Util.getSslTruststoreLocationProperty())); - Assert.assertEquals("truststore_password", - clientConfig.getProperty(clientX509Util.getSslTruststorePasswdProperty())); + assertEquals("true", clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT)); + assertEquals("org.apache.zookeeper.ClientCnxnSocketNetty", + clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET)); + assertEquals("keystore_location", + clientConfig.getProperty(clientX509Util.getSslKeystoreLocationProperty())); + assertEquals("keystore_password", + clientConfig.getProperty(clientX509Util.getSslKeystorePasswdProperty())); + assertEquals("truststore_location", + clientConfig.getProperty(clientX509Util.getSslTruststoreLocationProperty())); + assertEquals("truststore_password", + clientConfig.getProperty(clientX509Util.getSslTruststorePasswdProperty())); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestFailoverController.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestFailoverController.java index 3f027fa1c598a..01b9ef9c16787 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestFailoverController.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestFailoverController.java @@ -34,11 +34,14 @@ import static org.apache.hadoop.ha.TestNodeFencer.setupFencer; import org.apache.hadoop.security.AccessControlException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.internal.stubbing.answers.ThrowsException; -import static 
org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestFailoverController { private InetSocketAddress svc1Addr = new InetSocketAddress("svc1", 1234); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java index a027b4d682b9f..cd2a4c7c1c824 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.ha; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -28,8 +29,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; import org.slf4j.Logger; @@ -44,7 +45,7 @@ public class TestHAAdmin { private String errOutput; private String output; - @Before + @BeforeEach public void setup() throws IOException { tool = new HAAdmin() { @Override diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java index 8738372fc4b38..b24d8eadb1fee 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.ha; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.net.InetSocketAddress; @@ -30,8 +31,9 @@ import org.apache.hadoop.ha.HealthMonitor.State; import org.apache.hadoop.util.Time; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +49,7 @@ public class TestHealthMonitor { private DummyHAService svc; - @Before + @BeforeEach public void setupHM() throws InterruptedException, IOException { Configuration conf = new Configuration(); conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 1); @@ -78,7 +80,8 @@ protected DummyHAService createDummyHAService() { new InetSocketAddress("0.0.0.0", 0), true); } - @Test(timeout=15000) + @Test + @Timeout(value = 15) public void testMonitor() throws Exception { LOG.info("Mocking bad health check, waiting for UNHEALTHY"); svc.isHealthy = false; @@ -112,7 +115,8 @@ public void testMonitor() throws Exception { * Test that the proper state is propagated when the health monitor * sees an uncaught exception in its thread. 
*/ - @Test(timeout=15000) + @Test + @Timeout(value = 15) public void testHealthMonitorDies() throws Exception { LOG.info("Mocking RTE in health monitor, waiting for FAILED"); throwOOMEOnCreate = true; @@ -128,7 +132,8 @@ public void testHealthMonitorDies() throws Exception { * health monitor and thus change its state to FAILED * @throws Exception */ - @Test(timeout=15000) + @Test + @Timeout(value = 15) public void testCallbackThrowsRTE() throws Exception { hm.addCallback(new Callback() { @Override diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java index be67848e2120a..205f36a30180f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java @@ -17,7 +17,12 @@ */ package org.apache.hadoop.ha; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; import java.net.InetSocketAddress; import java.util.List; @@ -26,9 +31,8 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Shell; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestNodeFencer { @@ -40,16 +44,16 @@ public class TestNodeFencer { private static String FENCER_TRUE_COMMAND_UNIX = "shell(true)"; private static String FENCER_TRUE_COMMAND_WINDOWS = "shell(rem)"; - @Before + @BeforeEach public void clearMockState() { AlwaysSucceedFencer.fenceCalled = 0; AlwaysSucceedFencer.callArgs.clear(); AlwaysFailFencer.fenceCalled = 0; AlwaysFailFencer.callArgs.clear(); - MOCK_TARGET = Mockito.mock(HAServiceTarget.class); - Mockito.doReturn("my mock").when(MOCK_TARGET).toString(); - Mockito.doReturn(new InetSocketAddress("host", 1234)) + MOCK_TARGET = mock(HAServiceTarget.class); + doReturn("my mock").when(MOCK_TARGET).toString(); + doReturn(new InetSocketAddress("host", 1234)) .when(MOCK_TARGET).getAddress(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java index 3eb6f42e467ed..61537e1249f25 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java @@ -17,7 +17,15 @@ */ package org.apache.hadoop.ha; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.contains; +import static org.mockito.Mockito.endsWith; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.reset; import java.lang.reflect.Method; import java.net.InetSocketAddress; @@ -28,17 +36,15 @@ 
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; -import static org.mockito.Mockito.mock; - public class TestShellCommandFencer { private ShellCommandFencer fencer = createFencer(); private static final HAServiceTarget TEST_TARGET = @@ -46,19 +52,19 @@ public class TestShellCommandFencer { new InetSocketAddress("dummyhost", 1234)); private static final Logger LOG = ShellCommandFencer.LOG; - @BeforeClass + @BeforeAll public static void setupLogMock() { ShellCommandFencer.LOG = mock(Logger.class, new LogAnswer()); } - @AfterClass + @AfterAll public static void tearDownLogMock() throws Exception { ShellCommandFencer.LOG = LOG; } - @Before + @BeforeEach public void resetLogSpy() { - Mockito.reset(ShellCommandFencer.LOG); + reset(ShellCommandFencer.LOG); } private static ShellCommandFencer createFencer() { @@ -88,9 +94,8 @@ public void testCheckNoArgs() { new NodeFencer(conf, "shell"); fail("Didn't throw when passing no args to shell"); } catch (BadFencingConfigurationException confe) { - assertTrue( - "Unexpected exception:" + StringUtils.stringifyException(confe), - confe.getMessage().contains("No argument passed")); + assertTrue(confe.getMessage().contains("No argument passed"), + "Unexpected exception:" + StringUtils.stringifyException(confe)); } } @@ -101,9 +106,8 @@ public void testCheckParensNoArgs() { new NodeFencer(conf, "shell()"); fail("Didn't throw when passing no args to shell"); } catch (BadFencingConfigurationException confe) { - assertTrue( - "Unexpected exception:" + StringUtils.stringifyException(confe), - confe.getMessage().contains("Unable to parse line: 'shell()'")); + assertTrue(confe.getMessage().contains("Unable to parse line: 'shell()'"), + "Unexpected exception:" + StringUtils.stringifyException(confe)); } } @@ -114,8 +118,8 @@ public void testCheckParensNoArgs() { @Test public void testStdoutLogging() { assertTrue(fencer.tryFence(TEST_TARGET, "echo hello")); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.endsWith("echo hello: hello")); + verify(ShellCommandFencer.LOG).info( + endsWith("echo hello: hello")); } /** @@ -125,8 +129,8 @@ public void testStdoutLogging() { @Test public void testStderrLogging() { assertTrue(fencer.tryFence(TEST_TARGET, "echo hello>&2")); - Mockito.verify(ShellCommandFencer.LOG).warn( - Mockito.endsWith("echo hello>&2: hello")); + verify(ShellCommandFencer.LOG).warn( + endsWith("echo hello>&2: hello")); } /** @@ -137,12 +141,12 @@ public void testStderrLogging() { public void testConfAsEnvironment() { if (!Shell.WINDOWS) { fencer.tryFence(TEST_TARGET, "echo $in_fencing_tests"); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.endsWith("echo $in...ing_tests: yessir")); + verify(ShellCommandFencer.LOG).info( + endsWith("echo $in...ing_tests: yessir")); } else { fencer.tryFence(TEST_TARGET, "echo %in_fencing_tests%"); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.endsWith("echo %in...ng_tests%: yessir")); + verify(ShellCommandFencer.LOG).info( + endsWith("echo %in...ng_tests%: yessir")); 
} } @@ -154,12 +158,12 @@ public void testConfAsEnvironment() { public void testTargetAsEnvironment() { if (!Shell.WINDOWS) { fencer.tryFence(TEST_TARGET, "echo $target_host $target_port"); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.endsWith("echo $ta...rget_port: dummyhost 1234")); + verify(ShellCommandFencer.LOG).info( + endsWith("echo $ta...rget_port: dummyhost 1234")); } else { fencer.tryFence(TEST_TARGET, "echo %target_host% %target_port%"); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.endsWith("echo %ta...get_port%: dummyhost 1234")); + verify(ShellCommandFencer.LOG).info( + endsWith("echo %ta...get_port%: dummyhost 1234")); } } @@ -179,18 +183,18 @@ public void testEnvironmentWithPeer() { + "echo $source_host $source_port"; if (!Shell.WINDOWS) { fencer.tryFence(target, cmd); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.contains("echo $ta...rget_port: dummytarget 1111")); + verify(ShellCommandFencer.LOG).info( + contains("echo $ta...rget_port: dummytarget 1111")); fencer.tryFence(source, cmd); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.contains("echo $so...urce_port: dummysource 2222")); + verify(ShellCommandFencer.LOG).info( + contains("echo $so...urce_port: dummysource 2222")); } else { fencer.tryFence(target, cmd); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.contains("echo %ta...get_port%: dummytarget 1111")); + verify(ShellCommandFencer.LOG).info( + contains("echo %ta...get_port%: dummytarget 1111")); fencer.tryFence(source, cmd); - Mockito.verify(ShellCommandFencer.LOG).info( - Mockito.contains("echo %so...urce_port%: dummysource 2222")); + verify(ShellCommandFencer.LOG).info( + contains("echo %so...urce_port%: dummysource 2222")); } } @@ -201,7 +205,8 @@ public void testEnvironmentWithPeer() { * so that, if we use 'ssh', it won't try to prompt for a password * and block forever, for example. 
*/ - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testSubprocessInputIsClosed() { assertFalse(fencer.tryFence(TEST_TARGET, "read")); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestSshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestSshFenceByTcpPort.java index b07da8da5a89e..052ed9416fea5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestSshFenceByTcpPort.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestSshFenceByTcpPort.java @@ -17,7 +17,11 @@ */ package org.apache.hadoop.ha; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.net.InetSocketAddress; @@ -25,8 +29,8 @@ import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.SshFenceByTcpPort.Args; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assume; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.event.Level; public class TestSshFenceByTcpPort { @@ -55,9 +59,10 @@ public class TestSshFenceByTcpPort { new DummyHAService(HAServiceState.ACTIVE, new InetSocketAddress("8.8.8.8", 1234)); - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testFence() throws BadFencingConfigurationException { - Assume.assumeTrue(isConfigured()); + assumeTrue(isConfigured()); Configuration conf = new Configuration(); conf.set(SshFenceByTcpPort.CONF_IDENTITIES_KEY, TEST_KEYFILE); SshFenceByTcpPort fence = new SshFenceByTcpPort(); @@ -72,7 +77,8 @@ public void testFence() throws BadFencingConfigurationException { * Make sure that it times out and returns false, but doesn't throw * any exception */ - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testConnectTimeout() throws BadFencingConfigurationException { Configuration conf = new Configuration(); conf.setInt(SshFenceByTcpPort.CONF_CONNECT_TIMEOUT_KEY, 3000); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java index f2d5541632285..ac649311f02bc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java @@ -21,7 +21,7 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authorize.AccessControlList; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.HttpServer2.Builder; @@ -42,14 +42,14 @@ * This is a base class for functional tests of the {@link HttpServer2}. * The methods are static for other classes to import statically. 
*/ -public class HttpServerFunctionalTest extends Assert { +public class HttpServerFunctionalTest extends Assertions { @SuppressWarnings("serial") public static class LongHeaderServlet extends HttpServlet { @Override public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { - Assert.assertEquals(63 * 1024, request.getHeader("longheader").length()); + assertEquals(63 * 1024, request.getHeader("longheader").length()); response.setStatus(HttpServletResponse.SC_OK); } } @@ -244,7 +244,7 @@ public static void stop(HttpServer2 server) throws Exception { */ public static URL getServerURL(HttpServer2 server) throws MalformedURLException { - assertNotNull("No server", server); + assertNotNull(server, "No server"); return new URL("http://" + NetUtils.getHostPortString(server.getConnectorAddress(0))); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java index 44338dae9c937..545b273701fcf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java @@ -13,15 +13,14 @@ */ package org.apache.hadoop.http; -import org.junit.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import org.eclipse.jetty.util.log.Log; import javax.servlet.*; @@ -36,6 +35,9 @@ import java.util.HashMap; import java.util.List; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestAuthenticationSessionCookie { private static final String BASEDIR = GenericTestUtils.getTempPath(TestHttpCookieFlag.class.getSimpleName()); @@ -149,10 +151,10 @@ public void testSessionCookie() throws IOException { String header = conn.getHeaderField("Set-Cookie"); List cookies = HttpCookie.parse(header); - Assert.assertTrue(!cookies.isEmpty()); + assertTrue(!cookies.isEmpty()); Log.getLog().info(header); - Assert.assertFalse(header.contains("; Expires=")); - Assert.assertTrue("token".equals(cookies.get(0).getValue())); + assertFalse(header.contains("; Expires=")); + assertTrue("token".equals(cookies.get(0).getValue())); } @Test @@ -171,13 +173,13 @@ public void testPersistentCookie() throws IOException { String header = conn.getHeaderField("Set-Cookie"); List cookies = HttpCookie.parse(header); - Assert.assertTrue(!cookies.isEmpty()); + assertTrue(!cookies.isEmpty()); Log.getLog().info(header); - Assert.assertTrue(header.contains("; Expires=")); - Assert.assertTrue("token".equals(cookies.get(0).getValue())); + assertTrue(header.contains("; Expires=")); + assertTrue("token".equals(cookies.get(0).getValue())); } - @After + @AfterEach public void cleanup() throws Exception { server.stop(); FileUtil.fullyDelete(new File(BASEDIR)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java index ce068bb6f1cf6..dc5372fc2589b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java @@ -23,9 +23,9 @@ import java.net.URL; import javax.servlet.http.HttpServletResponse; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Small test to cover default disabled prof endpoint. @@ -35,14 +35,14 @@ public class TestDisabledProfileServlet extends HttpServerFunctionalTest { private static HttpServer2 server; private static URL baseUrl; - @BeforeClass + @BeforeAll public static void setup() throws Exception { server = createTestServer(); server.start(); baseUrl = getServerURL(server); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { server.stop(); } @@ -68,20 +68,20 @@ public void testQuery() throws Exception { @Test public void testRequestMethods() throws IOException { HttpURLConnection connection = getConnection("PUT"); - assertEquals("Unexpected response code", HttpServletResponse.SC_METHOD_NOT_ALLOWED, - connection.getResponseCode()); + assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + connection.getResponseCode(), "Unexpected response code"); connection.disconnect(); connection = getConnection("POST"); - assertEquals("Unexpected response code", HttpServletResponse.SC_METHOD_NOT_ALLOWED, - connection.getResponseCode()); + assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + connection.getResponseCode(), "Unexpected response code"); connection.disconnect(); connection = getConnection("DELETE"); - assertEquals("Unexpected response code", HttpServletResponse.SC_METHOD_NOT_ALLOWED, - connection.getResponseCode()); + assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + connection.getResponseCode(), "Unexpected response code"); connection.disconnect(); connection = getConnection("GET"); - assertEquals("Unexpected response code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR, - connection.getResponseCode()); + assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + connection.getResponseCode(), "Unexpected response code"); connection.disconnect(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index ade383883f10e..1a8581f7479c0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -35,7 +35,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java index 775754d9f879f..0342f5edc96e9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHtmlQuoting.java @@ -17,11 +17,14 @@ */ package org.apache.hadoop.http; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import javax.servlet.http.HttpServletRequest; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; public class TestHtmlQuoting { @@ -72,19 +75,18 @@ public void testRequestQuoting() throws Exception { new HttpServer2.QuotingInputFilter.RequestQuoter(mockReq); Mockito.doReturn("a cookies = HttpCookie.parse(header); - Assert.assertTrue(!cookies.isEmpty()); - Assert.assertTrue(header.contains("; HttpOnly")); - Assert.assertTrue("token".equals(cookies.get(0).getValue())); + assertTrue(!cookies.isEmpty()); + assertTrue(header.contains("; HttpOnly")); + assertTrue("token".equals(cookies.get(0).getValue())); } @Test @@ -135,13 +136,13 @@ public void testHttpsCookie() throws IOException, GeneralSecurityException { String header = conn.getHeaderField("Set-Cookie"); List cookies = HttpCookie.parse(header); - Assert.assertTrue(!cookies.isEmpty()); - Assert.assertTrue(header.contains("; HttpOnly")); - Assert.assertTrue(cookies.get(0).getSecure()); - Assert.assertTrue("token".equals(cookies.get(0).getValue())); + assertTrue(!cookies.isEmpty()); + assertTrue(header.contains("; HttpOnly")); + assertTrue(cookies.get(0).getSecure()); + assertTrue("token".equals(cookies.get(0).getValue())); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { server.stop(); FileUtil.fullyDelete(new File(BASEDIR)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java index 58721c4baa8f9..e150ab0427ff8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java @@ -17,25 +17,24 @@ */ package org.apache.hadoop.http; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - import org.eclipse.jetty.server.CustomRequestLog; import org.eclipse.jetty.server.RequestLog; import org.eclipse.jetty.server.Slf4jRequestLogWriter; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class TestHttpRequestLog { @Test public void testAppenderDefined() { RequestLog requestLog = HttpRequestLog.getRequestLog("test"); - assertNotNull("RequestLog should not be null", requestLog); - assertThat(requestLog, instanceOf(CustomRequestLog.class)); + assertNotNull(requestLog, "RequestLog should not be null"); + assertInstanceOf(CustomRequestLog.class, requestLog); CustomRequestLog crl = (CustomRequestLog) requestLog; - assertThat(crl.getWriter(), instanceOf(Slf4jRequestLogWriter.class)); + assertInstanceOf(Slf4jRequestLogWriter.class, crl.getWriter()); assertEquals(CustomRequestLog.EXTENDED_NCSA_FORMAT, 
crl.getFormatString()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java index bf2e8a4f2de40..d59aa34a65688 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java @@ -30,14 +30,12 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; -import org.assertj.core.api.Assertions; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.handler.StatisticsHandler; import org.eclipse.jetty.util.ajax.JSON; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,6 +71,8 @@ import java.util.concurrent.Executor; import java.util.concurrent.Executors; +import static org.assertj.core.api.Assertions.assertThat; + public class TestHttpServer extends HttpServerFunctionalTest { static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class); private static HttpServer2 server; @@ -143,7 +143,8 @@ public void doGet(HttpServletRequest request, } } - @BeforeClass public static void setup() throws Exception { + @BeforeAll + public static void setup() throws Exception { Configuration conf = new Configuration(); conf.setInt(HttpServer2.HTTP_MAX_THREADS_KEY, MAX_THREADS); conf.setBoolean( @@ -160,7 +161,8 @@ public void doGet(HttpServletRequest request, LOG.info("HTTP server started: "+ baseUrl); } - @AfterClass public static void cleanup() throws Exception { + @AfterAll + public static void cleanup() throws Exception { server.stop(); } @@ -181,8 +183,8 @@ public void run() { assertEquals("a:b\nc:d\n", readOutput(new URL(baseUrl, "/echo?a=b&c=d"))); int serverThreads = server.webServer.getThreadPool().getThreads(); - assertTrue("More threads are started than expected, Server Threads count: " - + serverThreads, serverThreads <= MAX_THREADS); + assertTrue(serverThreads <= MAX_THREADS, + "More threads are started than expected, Server Threads count: " + serverThreads); System.out.println("Number of threads = " + serverThreads + " which is less or equal than the max = " + MAX_THREADS); } catch (Exception e) { @@ -280,9 +282,9 @@ public void testHttpServer2Metrics() throws Exception { final HttpURLConnection conn = (HttpURLConnection)servletUrl.openConnection(); conn.connect(); - Assertions.assertThat(conn.getResponseCode()).isEqualTo(200); + assertThat(conn.getResponseCode()).isEqualTo(200); final int after = metrics.responses2xx(); - Assertions.assertThat(after).isGreaterThan(before); + assertThat(after).isGreaterThan(before); } /** @@ -329,7 +331,7 @@ private void validateXFrameOption(HttpServer2.XFrameOption option) throws try { HttpURLConnection conn = getHttpURLConnection(httpServer); String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS"); - assertTrue("X-FRAME-OPTIONS is absent in the header", xfoHeader != null); + assertTrue(xfoHeader != null, "X-FRAME-OPTIONS is absent in the header"); assertTrue(xfoHeader.endsWith(option.toString())); } finally { httpServer.stop(); @@ -345,7 +347,7 @@ public void testHttpResonseDoesNotContainXFrameOptions() throws 
Exception { try { HttpURLConnection conn = getHttpURLConnection(httpServer); String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS"); - assertTrue("Unexpected X-FRAME-OPTIONS in header", xfoHeader == null); + assertTrue(xfoHeader == null, "Unexpected X-FRAME-OPTIONS in header"); } finally { httpServer.stop(); } @@ -542,9 +544,8 @@ public void testRequestQuoterWithNull() throws Exception { Mockito.doReturn(null).when(request).getParameterValues("dummy"); RequestQuoter requestQuoter = new RequestQuoter(request); String[] parameterValues = requestQuoter.getParameterValues("dummy"); - Assert.assertNull( - "It should return null " + "when there are no values for the parameter", - parameterValues); + assertNull(parameterValues, + "It should return null when there are no values for the parameter"); } @Test @@ -554,8 +555,8 @@ public void testRequestQuoterWithNotNull() throws Exception { Mockito.doReturn(values).when(request).getParameterValues("dummy"); RequestQuoter requestQuoter = new RequestQuoter(request); String[] parameterValues = requestQuoter.getParameterValues("dummy"); - Assert.assertTrue("It should return Parameter Values", Arrays.equals( - values, parameterValues)); + assertTrue(Arrays.equals(values, parameterValues), + "It should return Parameter Values"); } @SuppressWarnings("unchecked") @@ -585,32 +586,32 @@ public void testHasAdministratorAccess() throws Exception { HttpServletResponse response = Mockito.mock(HttpServletResponse.class); //authorization OFF - Assert.assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); + assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); //authorization ON & user NULL response = Mockito.mock(HttpServletResponse.class); conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true); - Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response)); + assertFalse(HttpServer2.hasAdministratorAccess(context, request, response)); Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString()); //authorization ON & user NOT NULL & ACLs NULL response = Mockito.mock(HttpServletResponse.class); Mockito.when(request.getRemoteUser()).thenReturn("foo"); - Assert.assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); + assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs response = Mockito.mock(HttpServletResponse.class); AccessControlList acls = Mockito.mock(AccessControlList.class); Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false); Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); - Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response)); + assertFalse(HttpServer2.hasAdministratorAccess(context, request, response)); Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString()); //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs response = Mockito.mock(HttpServletResponse.class); Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(true); Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); - Assert.assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); + assertTrue(HttpServer2.hasAdministratorAccess(context, request, response)); } @@ -623,7 +624,7 @@ public void testRequiresAuthorizationAccess() throws Exception { HttpServletResponse response = 
Mockito.mock(HttpServletResponse.class); //requires admin access to instrumentation, FALSE by default - Assert.assertTrue(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); + assertTrue(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); //requires admin access to instrumentation, TRUE conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true); @@ -631,7 +632,7 @@ public void testRequiresAuthorizationAccess() throws Exception { AccessControlList acls = Mockito.mock(AccessControlList.class); Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false); Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls); - Assert.assertFalse(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); + assertFalse(HttpServer2.isInstrumentationAccessAllowed(context, request, response)); } @Test public void testBindAddress() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java index 4ae1190abd5af..4b6ad61a94d26 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.http; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestHttpServerLifecycle extends HttpServerFunctionalTest { @@ -27,12 +27,12 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest { * @param server server */ private void assertAlive(HttpServer2 server) { - assertTrue("Server is not alive", server.isAlive()); + assertTrue(server.isAlive(), "Server is not alive"); assertToStringContains(server, HttpServer2.STATE_DESCRIPTION_ALIVE); } private void assertNotLive(HttpServer2 server) { - assertTrue("Server should not be live", !server.isAlive()); + assertTrue(!server.isAlive(), "Server should not be live"); assertToStringContains(server, HttpServer2.STATE_DESCRIPTION_NOT_LIVE); } @@ -73,8 +73,8 @@ public void testStartedServerIsAlive() throws Throwable { */ private void assertToStringContains(HttpServer2 server, String text) { String description = server.toString(); - assertTrue("Did not find \"" + text + "\" in \"" + description + "\"", - description.contains(text)); + assertTrue(description.contains(text), + "Did not find \"" + text + "\" in \"" + description + "\""); } /** @@ -121,6 +121,6 @@ public void testWepAppContextAfterServerStop() throws Throwable { assertAlive(server); assertEquals(value, server.getAttribute(key)); stop(server); - assertNull("Server context should have cleared", server.getAttribute(key)); + assertNull(server.getAttribute(key), "Server context should have cleared"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java index a4abbd92405ce..c4f8bf22f8422 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java @@ -21,9 +21,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import 
org.apache.hadoop.net.NetUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,7 +36,7 @@ public class TestHttpServerLogs extends HttpServerFunctionalTest { static final Logger LOG = LoggerFactory.getLogger(TestHttpServerLogs.class); private static HttpServer2 server; - @BeforeClass + @BeforeAll public static void setup() throws Exception { } @@ -47,7 +47,7 @@ private void startServer(Configuration conf) throws Exception { LOG.info("HTTP server started: "+ baseUrl); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { if (server != null && server.isAlive()) { server.stop(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java index 07dbc2a7c6e23..bf36a60a9433e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java @@ -18,7 +18,7 @@ package org.apache.hadoop.http; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java index cddbc2a1959ae..d6a92f0df1de9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java @@ -34,10 +34,9 @@ import org.apache.hadoop.security.authentication.util.StringSignerSecretProviderCreator; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.authorize.ProxyUsers; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.Assert; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.FileWriter; @@ -46,7 +45,9 @@ import java.net.URI; import java.net.URL; import java.util.Properties; -import static org.junit.Assert.assertTrue; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * This class is tested for http server with SPNEGO authentication. 
@@ -69,7 +70,7 @@ public class TestHttpServerWithSpnego { private static MiniKdc testMiniKDC; private static File secretFile = new File(testRootDir, SECRET_STR); - @BeforeClass + @BeforeAll public static void setUp() throws Exception { try { testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir); @@ -77,14 +78,14 @@ public static void setUp() throws Exception { testMiniKDC.createPrincipal( httpSpnegoKeytabFile, HTTP_USER + "/localhost"); } catch (Exception e) { - assertTrue("Couldn't setup MiniKDC", false); + assertTrue(false, "Couldn't setup MiniKDC"); } Writer w = new FileWriter(secretFile); w.write("secret"); w.close(); } - @AfterClass + @AfterAll public static void tearDown() { if (testMiniKDC != null) { testMiniKDC.stop(); @@ -153,7 +154,7 @@ public void testAuthenticationWithProxyUser() throws Exception { HttpURLConnection conn = authUrl .openConnection(new URL(serverURL + servlet + "?doAs=userB"), token); - Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); + assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); } // userA cannot impersonate userC, it fails. @@ -162,7 +163,7 @@ public void testAuthenticationWithProxyUser() throws Exception { HttpURLConnection conn = authUrl .openConnection(new URL(serverURL + servlet + "?doAs=userC"), token); - Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode()); } @@ -173,7 +174,7 @@ public void testAuthenticationWithProxyUser() throws Exception { new String[]{"logLevel", "logs"}) { HttpURLConnection conn = authUrl .openConnection(new URL(serverURL + servlet), token); - Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); + assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); } // Setup token for userB @@ -184,7 +185,7 @@ public void testAuthenticationWithProxyUser() throws Exception { new String[]{"logLevel", "logs"}) { HttpURLConnection conn = authUrl .openConnection(new URL(serverURL + servlet), token); - Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode()); } @@ -221,13 +222,13 @@ public void testAuthenticationToAllowList() throws Exception { // endpoints in whitelist should not require Kerberos authentication for (String endpoint : allowList) { HttpURLConnection conn = (HttpURLConnection) new URL(serverURL + endpoint).openConnection(); - Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); + assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); } // endpoints not in whitelist should require Kerberos authentication for (String endpoint : denyList) { HttpURLConnection conn = (HttpURLConnection) new URL(serverURL + endpoint).openConnection(); - Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode()); + assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode()); } } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java index 22bea17a7c063..8b367a12a6ce8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java @@ -18,8 +18,8 @@ package org.apache.hadoop.http; -import org.junit.Before; -import org.junit.Test; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -29,7 +29,7 @@ import java.io.PrintWriter; import java.nio.charset.StandardCharsets; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; @@ -51,7 +51,7 @@ public class TestIsActiveServlet { private HttpServletResponse resp; private ByteArrayOutputStream respOut; - @Before + @BeforeEach public void setUp() throws Exception { req = mock(HttpServletRequest.class); resp = mock(HttpServletResponse.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestProfileServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestProfileServlet.java index 5c87451a49e6c..b895e1818942a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestProfileServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestProfileServlet.java @@ -22,9 +22,9 @@ import java.net.URL; import java.util.UUID; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,7 +38,7 @@ public class TestProfileServlet extends HttpServerFunctionalTest { private static final Logger LOG = LoggerFactory.getLogger(TestProfileServlet.class); - @BeforeClass + @BeforeAll public static void setup() throws Exception { ProfileServlet.setIsTestRun(true); System.setProperty("async.profiler.home", UUID.randomUUID().toString()); @@ -47,7 +47,7 @@ public static void setup() throws Exception { baseUrl = getServerURL(server); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { ProfileServlet.setIsTestRun(false); System.clearProperty("async.profiler.home"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java index cc76b4ad6d975..30065cc31c10d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java @@ -43,9 +43,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -106,7 +106,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { static final String INCLUDED_PROTOCOLS = "TLSv1.2"; static final String INCLUDED_PROTOCOLS_JDK11 = "TLSv1.3,TLSv1.2"; - @BeforeClass + @BeforeAll public static void setup() throws Exception { turnOnSSLDebugLogging(); storeHttpsCipherSuites(); @@ -156,7 +156,7 @@ private static void setupServer(Configuration conf, Configuration sslConf) server.start(); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { server.stop(); 
FileUtil.fullyDelete(new File(BASEDIR)); @@ -286,7 +286,7 @@ public void testExcludedCiphers() throws Exception { URL url = new URL(baseUrl, SERVLET_PATH_ECHO + "?a=b&c=d"); HttpsURLConnection conn = getConnectionWithSSLSocketFactory(url, EXCLUDED_CIPHERS); - assertFalse("excludedCipher list is empty", EXCLUDED_CIPHERS.isEmpty()); + assertFalse(EXCLUDED_CIPHERS.isEmpty(), "excludedCipher list is empty"); try { readFromConnection(conn); fail("No Ciphers in common, SSLHandshake must fail."); @@ -306,8 +306,7 @@ public void testIncludedProtocols() throws Exception { HttpsURLConnection conn = getConnectionWithPreferredProtocolSSLSocketFactory(url, includedProtocols); - assertFalse("included protocol list is empty", - includedProtocols.isEmpty()); + assertFalse(includedProtocols.isEmpty(), "included protocol list is empty"); readFromConnection(conn); @@ -351,7 +350,7 @@ private void testEnabledCiphers(String ciphers) throws IOException, GeneralSecurityException { URL url = new URL(baseUrl, SERVLET_PATH_ECHO + "?a=b&c=d"); HttpsURLConnection conn = getConnectionWithSSLSocketFactory(url, ciphers); - assertFalse("excludedCipher list is empty", ciphers.isEmpty()); + assertFalse(ciphers.isEmpty(), "excludedCipher list is empty"); String out = readFromConnection(conn); assertEquals(out, "a:b\nc:d\n"); LOG.info("At least one additional enabled cipher than excluded ciphers," diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServerConfigs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServerConfigs.java index 039fae0195730..5b61509f4b0a5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServerConfigs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServerConfigs.java @@ -27,10 +27,10 @@ import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import static org.apache.hadoop.http.TestSSLHttpServer.EXCLUDED_CIPHERS; import static org.apache.hadoop.http.TestSSLHttpServer.INCLUDED_PROTOCOLS; @@ -39,6 +39,7 @@ import static org.apache.hadoop.security.ssl.KeyStoreTestUtil.CLIENT_KEY_STORE_PASSWORD_DEFAULT; import static org.apache.hadoop.security.ssl.KeyStoreTestUtil.SERVER_KEY_STORE_PASSWORD_DEFAULT; import static org.apache.hadoop.security.ssl.KeyStoreTestUtil.TRUST_STORE_PASSWORD_DEFAULT; +import static org.junit.jupiter.api.Assertions.fail; /** * Test suit for testing KeyStore and TrustStore password settings. 
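The class-level lifecycle swaps above (TestHttpServerLogs, TestProfileServlet, TestSSLHttpServer) map one-to-one: @BeforeClass/@AfterClass become @BeforeAll/@AfterAll, and the hooks must stay static. A minimal sketch under that assumption; the resource type and field name are placeholders, not from the patch:

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.Test;

    public class ClassLifecycleSketch {
      private static AutoCloseable resource;   // placeholder for the shared server

      @BeforeAll                               // was @BeforeClass
      public static void setup() {
        resource = () -> { };                  // start the shared fixture once per class
      }

      @AfterAll                                // was @AfterClass
      public static void cleanup() throws Exception {
        if (resource != null) {
          resource.close();                    // mirrors the null/alive guard used above
        }
      }

      @Test
      public void usesSharedResource() {
        // per-test methods still use @Test from org.junit.jupiter.api
      }
    }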
@@ -56,7 +57,7 @@ public class TestSSLHttpServerConfigs { private static final String CLIENT_PWD = CLIENT_KEY_STORE_PASSWORD_DEFAULT; private static final String TRUST_STORE_PWD = TRUST_STORE_PASSWORD_DEFAULT; - @Before + @BeforeEach public void start() throws Exception { TestSSLHttpServer.turnOnSSLDebugLogging(); TestSSLHttpServer.storeHttpsCipherSuites(); @@ -71,7 +72,7 @@ public void start() throws Exception { sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class); } - @After + @AfterEach public void shutdown() throws Exception { FileUtil.fullyDelete(new File(BASEDIR)); KeyStoreTestUtil.cleanupSSLConfig(keystoreDir, sslConfDir); @@ -136,38 +137,43 @@ public Boolean get() { } } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetup() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); testServerStart(SERVER_PWD, SERVER_PWD, TRUST_STORE_PWD); } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetupWithoutTrustPassword() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); testServerStart(SERVER_PWD, SERVER_PWD, null); } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetupWithoutKeyStorePassword() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); testServerStart(SERVER_PWD, null, null); } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetupWithoutKeyStoreKeyPassword() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); testServerStart(null, SERVER_PWD, null); } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetupWithNoKeyStorePassword() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); // Accessing KeyStore without either of KeyStore.KeyPassword or KeyStore // .password should fail. try { testServerStart(null, null, null); - Assert.fail("Server should have failed to start without any " + + fail("Server should have failed to start without any " + "KeyStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Problem starting http server", @@ -175,14 +181,15 @@ public void testServerSetupWithNoKeyStorePassword() throws Exception { } } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testServerSetupWithWrongKeyStorePassword() throws Exception { setupKeyStores(SERVER_PWD, CLIENT_PWD, TRUST_STORE_PWD); // Accessing KeyStore with wrong keyStore password/ keyPassword should fail. 
try { testServerStart(SERVER_PWD, "wrongPassword", null); - Assert.fail("Server should have failed to start with wrong " + + fail("Server should have failed to start with wrong " + "KeyStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Keystore was tampered with, " + @@ -191,7 +198,7 @@ public void testServerSetupWithWrongKeyStorePassword() throws Exception { try { testServerStart("wrongPassword", SERVER_PWD, null); - Assert.fail("Server should have failed to start with wrong " + + fail("Server should have failed to start with wrong " + "KeyStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Problem starting http server", @@ -201,7 +208,8 @@ public void testServerSetupWithWrongKeyStorePassword() throws Exception { } } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testKeyStoreSetupWithoutTrustStorePassword() throws Exception { // Setup TrustStore without TrustStore password setupKeyStores(SERVER_PWD, CLIENT_PWD, ""); @@ -213,7 +221,7 @@ public void testKeyStoreSetupWithoutTrustStorePassword() throws Exception { // set) should fail. try { testServerStart(SERVER_PWD, SERVER_PWD, "wrongPassword"); - Assert.fail("Server should have failed to start with wrong " + + fail("Server should have failed to start with wrong " + "TrustStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Keystore was tampered with, " + @@ -221,7 +229,8 @@ public void testKeyStoreSetupWithoutTrustStorePassword() throws Exception { } } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testKeyStoreSetupWithoutKeyStorePassword() throws Exception { // Setup KeyStore without KeyStore password setupKeyStores(SERVER_PWD, "", TRUST_STORE_PWD); @@ -233,7 +242,7 @@ public void testKeyStoreSetupWithoutKeyStorePassword() throws Exception { // set) should fail. 
try { testServerStart(SERVER_PWD, "wrongPassword", TRUST_STORE_PWD); - Assert.fail("Server should have failed to start with wrong " + + fail("Server should have failed to start with wrong " + "KeyStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Keystore was tampered with, " + @@ -241,7 +250,8 @@ public void testKeyStoreSetupWithoutKeyStorePassword() throws Exception { } } - @Test(timeout=120000) + @Test + @Timeout(value = 120) public void testKeyStoreSetupWithoutPassword() throws Exception { // Setup KeyStore without any password setupKeyStores("", "", ""); @@ -254,7 +264,7 @@ public void testKeyStoreSetupWithoutPassword() throws Exception { try { testServerStart(null, null, null); - Assert.fail("Server should have failed to start without " + + fail("Server should have failed to start without " + "KeyStore password."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("Problem starting http server", diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index a8ecbd4fe28ef..3c225d0d25058 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -35,7 +35,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/lib/TestStaticUserWebFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/lib/TestStaticUserWebFilter.java index 03b37e304619d..646a09955a2c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/lib/TestStaticUserWebFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/lib/TestStaticUserWebFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.http.lib; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.http.lib.StaticUserWebFilter.StaticUserFilter; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java index 4234f24006999..76db3b332225e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java @@ -27,7 +27,6 @@ import java.util.Enumeration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; @@ -44,6 +43,8 @@ import org.apache.hadoop.util.Time; import org.slf4j.event.Level; +import static org.junit.jupiter.api.Assertions.fail; + /** * MiniRPCBenchmark measures 
time to establish an RPC connection * to a secure RPC server. @@ -222,7 +223,7 @@ public MiniProtocol run() throws IOException { } }); } catch (InterruptedException e) { - Assert.fail(Arrays.toString(e.getStackTrace())); + fail(Arrays.toString(e.getStackTrace())); } } finally { RPC.stopProxy(client); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java index 64c486c4b14f8..11ec4e83d01d9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java @@ -29,9 +29,9 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.concurrent.AsyncGetFuture; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,8 +43,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestAsyncIPC { @@ -56,7 +56,7 @@ public class TestAsyncIPC { return new AsyncGetFuture<>(Client.getAsyncRpcResponse()); } - @Before + @BeforeEach public void setupConf() { conf = new Configuration(); conf.setInt(CommonConfigurationKeys.IPC_CLIENT_ASYNC_CALLS_MAX_KEY, 10000); @@ -102,10 +102,10 @@ public void run() { void assertReturnValues() throws InterruptedException, ExecutionException { for (int i = 0; i < count; i++) { LongWritable value = returnFutures.get(i).get(); - Assert.assertEquals("call" + i + " failed.", - expectedValues.get(i).longValue(), value.get()); + assertEquals(expectedValues.get(i).longValue(), value.get(), + "call" + i + " failed."); } - Assert.assertFalse(failed); + assertFalse(failed); } void assertReturnValues(long timeout, TimeUnit unit) @@ -128,12 +128,12 @@ void assertReturnValues(long timeout, TimeUnit unit) continue; } - Assert.assertEquals("call" + i + " failed.", - expectedValues.get(i).longValue(), value.get()); + assertEquals(expectedValues.get(i).longValue(), value.get(), + "call" + i + " failed."); checked[i] = true; } } - Assert.assertFalse(failed); + assertFalse(failed); } } @@ -227,14 +227,16 @@ private void waitForReturnValues(final int start, final int end) } } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testAsyncCall() throws IOException, InterruptedException, ExecutionException { internalTestAsyncCall(3, false, 2, 5, 100); internalTestAsyncCall(3, true, 2, 5, 10); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testAsyncCallLimit() throws IOException, InterruptedException, ExecutionException { internalTestAsyncCallLimit(100, false, 5, 10, 500); @@ -267,7 +269,8 @@ public void internalTestAsyncCall(int handlerCount, boolean handlerSleep, server.stop(); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testCallGetReturnRpcResponseMultipleTimes() throws IOException, InterruptedException, ExecutionException { int handlerCount = 10, callCount = 100; @@ -284,14 +287,15 @@ public void testCallGetReturnRpcResponseMultipleTimes() throws IOException, 
caller.assertReturnValues(); caller.assertReturnValues(); caller.assertReturnValues(); - Assert.assertEquals(asyncCallCount, client.getAsyncCallCount()); + assertEquals(asyncCallCount, client.getAsyncCallCount()); } finally { client.stop(); server.stop(); } } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testFutureGetWithTimeout() throws IOException, InterruptedException, ExecutionException { // GenericTestUtils.setLogLevel(AsyncGetFuture.LOG, Level.ALL); @@ -340,7 +344,7 @@ public void internalTestAsyncCallLimit(int handlerCount, boolean handlerSleep, callers[i].getCount()); String msg = String.format("Expected not failed for caller-%d: %s.", i, callers[i]); - assertFalse(msg, callers[i].failed); + assertFalse(callers[i].failed, msg); } for (int i = 0; i < clientCount; i++) { clients[i].stop(); @@ -356,7 +360,8 @@ public void internalTestAsyncCallLimit(int handlerCount, boolean handlerSleep, * @throws ExecutionException * @throws InterruptedException */ - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testCallIdAndRetry() throws IOException, InterruptedException, ExecutionException { final Map infoMap = new HashMap(); @@ -382,7 +387,7 @@ Call createCall(RpcKind rpcKind, Writable rpcRequest) { @Override void checkResponse(RpcResponseHeaderProto header) throws IOException { super.checkResponse(header); - Assert.assertEquals(infoMap.get(header.getCallId()).retry, + assertEquals(infoMap.get(header.getCallId()).retry, header.getRetryCount()); } }; @@ -392,7 +397,7 @@ void checkResponse(RpcResponseHeaderProto header) throws IOException { server.callListener = new Runnable() { @Override public void run() { - Assert.assertEquals(infoMap.get(Server.getCallId()).retry, + assertEquals(infoMap.get(Server.getCallId()).retry, Server.getCallRetryCount()); } }; @@ -415,7 +420,8 @@ public void run() { * @throws ExecutionException * @throws InterruptedException */ - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testCallRetryCount() throws IOException, InterruptedException, ExecutionException { final int retryCount = 255; @@ -430,7 +436,7 @@ public void testCallRetryCount() throws IOException, InterruptedException, public void run() { // we have not set the retry count for the client, thus on the server // side we should see retry count as 0 - Assert.assertEquals(retryCount, Server.getCallRetryCount()); + assertEquals(retryCount, Server.getCallRetryCount()); } }; @@ -452,11 +458,13 @@ public void run() { * @throws ExecutionException * @throws InterruptedException */ - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testInitialCallRetryCount() throws IOException, InterruptedException, ExecutionException { // Override client to store the call id final Client client = new Client(LongWritable.class, conf); + Client.setCallIdAndRetryCount(Client.nextCallId(), 0, null); // Attach a listener that tracks every call ID received by the server. 
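The timeout rewrites in TestAsyncIPC (and the earlier TestSSLHttpServerConfigs hunks) rely on org.junit.jupiter.api.Timeout defaulting to seconds, so the JUnit 4 value of 60000 milliseconds becomes value = 60. A minimal sketch of the two equivalent spellings; the method names are illustrative only:

    import java.util.concurrent.TimeUnit;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    public class TimeoutSketch {
      // JUnit 4: @Test(timeout = 60000)
      @Test
      @Timeout(value = 60)                                 // unit defaults to seconds
      public void boundedCall() throws InterruptedException {
        Thread.sleep(10);
      }

      // Same bound expressed in the original milliseconds, if preferred.
      @Test
      @Timeout(value = 60000, unit = TimeUnit.MILLISECONDS)
      public void boundedCallExplicitUnit() throws InterruptedException {
        Thread.sleep(10);
      }
    }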
final TestServer server = new TestIPC.TestServer(1, false, conf); @@ -465,7 +473,7 @@ public void testInitialCallRetryCount() throws IOException, public void run() { // we have not set the retry count for the client, thus on the server // side we should see retry count as 0 - Assert.assertEquals(0, Server.getCallRetryCount()); + assertEquals(0, Server.getCallRetryCount()); } }; @@ -488,7 +496,8 @@ public void run() { * @throws InterruptedException * @throws ExecutionException */ - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testUniqueSequentialCallIds() throws IOException, InterruptedException, ExecutionException { int serverThreads = 10, callerCount = 100, perCallerCallCount = 100; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java index 545ddb40ff5fe..bc607d762a3cd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java @@ -18,16 +18,18 @@ package org.apache.hadoop.ipc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.reset; import java.util.ArrayList; import java.util.HashMap; @@ -38,8 +40,8 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException; import org.apache.hadoop.security.UserGroupInformation; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestCallQueueManager { private CallQueueManager manager; @@ -261,7 +263,8 @@ public void testSchedulerWithoutFCQ() throws InterruptedException { assertCanPut(manager, 0, 1); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testSwapUnderContention() throws InterruptedException { manager = new CallQueueManager(queueClass, schedulerClass, false, 5000, "", conf); @@ -438,12 +441,12 @@ public void testSchedulerConstructorException() throws InterruptedException { @SuppressWarnings("unchecked") @Test public void testCallQueueOverflowExceptions() throws Exception { - RpcScheduler scheduler = Mockito.mock(RpcScheduler.class); - BlockingQueue queue = Mockito.mock(BlockingQueue.class); + RpcScheduler scheduler = mock(RpcScheduler.class); + BlockingQueue queue = mock(BlockingQueue.class); CallQueueManager cqm = - Mockito.spy(new CallQueueManager<>(queue, scheduler, false, false)); + spy(new CallQueueManager<>(queue, scheduler, false, false)); CallQueueManager cqmTriggerFailover = - Mockito.spy(new CallQueueManager<>(queue, scheduler, false, true)); + spy(new 
CallQueueManager<>(queue, scheduler, false, true)); Schedulable call = new FakeCall(0); // call queue exceptions that trigger failover @@ -473,7 +476,7 @@ public void testCallQueueOverflowExceptions() throws Exception { cqm.add(call); fail("didn't throw"); } catch (Exception ex) { - assertTrue(ex.toString(), ex instanceof CallQueueOverflowException); + assertTrue(ex instanceof CallQueueOverflowException, ex.toString()); } // backoff disabled, put is put to queue. @@ -500,7 +503,7 @@ public void testCallQueueOverflowExceptions() throws Exception { cqm.put(call); fail("didn't fail"); } catch (Exception ex) { - assertTrue(ex.toString(), ex instanceof CallQueueOverflowException); + assertTrue(ex instanceof CallQueueOverflowException, ex.toString()); } verify(queue, times(0)).put(call); verify(queue, times(0)).add(call); @@ -513,7 +516,7 @@ public void testCallQueueOverflowExceptions() throws Exception { cqm.add(call); fail("didn't fail"); } catch (Exception ex) { - assertTrue(ex.toString(), ex instanceof CallQueueOverflowException); + assertTrue(ex instanceof CallQueueOverflowException, ex.toString()); } verify(queue, times(0)).put(call); verify(queue, times(0)).add(call); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallerContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallerContext.java index bb4a119e7db29..a995b2232ecba 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallerContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallerContext.java @@ -18,10 +18,11 @@ package org.apache.hadoop.ipc; import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_SEPARATOR_KEY; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; public class TestCallerContext { @Test @@ -31,14 +32,14 @@ public void testBuilderAppend() { CallerContext.Builder builder = new CallerContext.Builder(null, conf); CallerContext context = builder.append("context1") .append("context2").append("key3", "value3").build(); - Assert.assertEquals(true, + assertEquals(true, context.getContext().contains("$")); String[] items = context.getContext().split("\\$"); - Assert.assertEquals(3, items.length); - Assert.assertEquals("key3:value3", items[2]); + assertEquals(3, items.length); + assertEquals("key3:value3", items[2]); builder.append("$$"); - Assert.assertEquals("context1$context2$key3:value3$$$", + assertEquals("context1$context2$key3:value3$$$", builder.build().getContext()); } @@ -48,37 +49,39 @@ public void testBuilderAppendIfAbsent() { conf.set(HADOOP_CALLER_CONTEXT_SEPARATOR_KEY, "$"); CallerContext.Builder builder = new CallerContext.Builder(null, conf); builder.append("key1", "value1"); - Assert.assertEquals("key1:value1", + assertEquals("key1:value1", builder.build().getContext()); // Append an existed key with different value. builder.appendIfAbsent("key1", "value2"); String[] items = builder.build().getContext().split("\\$"); - Assert.assertEquals(1, items.length); - Assert.assertEquals("key1:value1", + assertEquals(1, items.length); + assertEquals("key1:value1", builder.build().getContext()); // Append an absent key. 
builder.appendIfAbsent("key2", "value2"); String[] items2 = builder.build().getContext().split("\\$"); - Assert.assertEquals(2, items2.length); - Assert.assertEquals("key1:value1$key2:value2", + assertEquals(2, items2.length); + assertEquals("key1:value1$key2:value2", builder.build().getContext()); // Append a key that is a substring of an existing key. builder.appendIfAbsent("key", "value"); String[] items3 = builder.build().getContext().split("\\$"); - Assert.assertEquals(3, items3.length); - Assert.assertEquals("key1:value1$key2:value2$key:value", + assertEquals(3, items3.length); + assertEquals("key1:value1$key2:value2$key:value", builder.build().getContext()); } - @Test(expected = IllegalArgumentException.class) + @Test public void testNewBuilder() { - Configuration conf = new Configuration(); - // Set illegal separator. - conf.set(HADOOP_CALLER_CONTEXT_SEPARATOR_KEY, "\t"); - CallerContext.Builder builder = new CallerContext.Builder(null, conf); - builder.build(); + assertThrows(IllegalArgumentException.class, () -> { + Configuration conf = new Configuration(); + // Set illegal separator. + conf.set(HADOOP_CALLER_CONTEXT_SEPARATOR_KEY, "\t"); + CallerContext.Builder builder = new CallerContext.Builder(null, conf); + builder.build(); + }); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java index 4ae3de1b15873..0afae0be4378f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java @@ -22,13 +22,15 @@ import java.util.Map; import org.eclipse.jetty.util.ajax.JSON; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import static org.apache.hadoop.ipc.DecayRpcScheduler.IPC_DECAYSCHEDULER_THRESHOLDS_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -80,14 +82,18 @@ public long getCost(ProcessingDetails details) { private DecayRpcScheduler scheduler; - @Test(expected=IllegalArgumentException.class) + @Test public void testNegativeScheduler() { - scheduler = new DecayRpcScheduler(-1, "", new Configuration()); + assertThrows(IllegalArgumentException.class, () -> { + scheduler = new DecayRpcScheduler(-1, "", new Configuration()); + }); } - @Test(expected=IllegalArgumentException.class) + @Test public void testZeroScheduler() { - scheduler = new DecayRpcScheduler(0, "", new Configuration()); + assertThrows(IllegalArgumentException.class, () -> { + scheduler = new DecayRpcScheduler(0, "", new Configuration()); + }); } @Test @@ -292,17 +298,18 @@ public void testPriority() throws Exception { "Hadoop:service="+ namespace + ",name=DecayRpcScheduler"); String cvs1 = (String) mbs.getAttribute(mxbeanName, "CallVolumeSummary"); - assertTrue("Get expected JMX of CallVolumeSummary before decay", - 
cvs1.equals("{\"A\":6,\"B\":2,\"C\":2}")); + assertTrue(cvs1.equals("{\"A\":6,\"B\":2,\"C\":2}"), + "Get expected JMX of CallVolumeSummary before decay"); scheduler.forceDecay(); String cvs2 = (String) mbs.getAttribute(mxbeanName, "CallVolumeSummary"); - assertTrue("Get expected JMX for CallVolumeSummary after decay", - cvs2.equals("{\"A\":3,\"B\":1,\"C\":1}")); + assertTrue(cvs2.equals("{\"A\":3,\"B\":1,\"C\":1}"), + "Get expected JMX for CallVolumeSummary after decay"); } - @Test(timeout=2000) + @Test + @Timeout(value = 2) @SuppressWarnings("deprecation") public void testPeriodic() throws InterruptedException { Configuration conf = new Configuration(); @@ -325,7 +332,8 @@ public void testPeriodic() throws InterruptedException { } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testNPEatInitialization() throws InterruptedException { // redirect the LOG to and check if there is NPE message while initializing // the DecayRpcScheduler diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java index 06b65dc4df3c5..1afc88c562c8e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java @@ -28,15 +28,15 @@ import static org.mockito.Mockito.when; import static org.mockito.Mockito.times; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; import javax.management.MBeanServer; import javax.management.ObjectName; @@ -77,7 +77,7 @@ private Schedulable mockCall(String id) { } @SuppressWarnings("deprecation") - @Before + @BeforeEach public void setUp() { Configuration conf = new Configuration(); conf.setInt("ns." 
+ FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2); @@ -407,21 +407,21 @@ public void testInsertion() throws Exception { private void checkOverflowException(Exception ex, RpcStatusProto status, boolean failOverTriggered) { // should be an overflow exception - assertTrue(ex.getClass().getName() + " != CallQueueOverflowException", - ex instanceof CallQueueOverflowException); + assertTrue(ex instanceof CallQueueOverflowException, + ex.getClass().getName() + " != CallQueueOverflowException"); IOException ioe = ((CallQueueOverflowException)ex).getCause(); assertNotNull(ioe); - assertTrue(ioe.getClass().getName() + " != RpcServerException", - ioe instanceof RpcServerException); + assertTrue(ioe instanceof RpcServerException, + ioe.getClass().getName() + " != RpcServerException"); RpcServerException rse = (RpcServerException)ioe; // check error/fatal status and if it embeds a retriable ex or standby ex. assertEquals(status, rse.getRpcStatusProto()); if (failOverTriggered) { - assertTrue(rse.getClass().getName() + " != RetriableException", - rse.getCause() instanceof StandbyException); + assertTrue(rse.getCause() instanceof StandbyException, + rse.getClass().getName() + " != RetriableException"); } else { - assertTrue(rse.getClass().getName() + " != RetriableException", - rse.getCause() instanceof RetriableException); + assertTrue(rse.getCause() instanceof RetriableException, + rse.getClass().getName() + " != RetriableException"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index 9165c71eb41bf..a191095b44516 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -18,19 +18,28 @@ package org.apache.hadoop.ipc; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.reset; import java.io.ByteArrayOutputStream; import java.io.DataInput; @@ -94,12 +103,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.LambdaTestUtils; import org.apache.hadoop.util.StringUtils; -import 
org.assertj.core.api.Condition; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -110,8 +116,6 @@ import org.slf4j.LoggerFactory; import org.slf4j.event.Level; -import static org.assertj.core.api.Assertions.assertThat; - /** Unit tests for IPC. */ public class TestIPC { public static final Logger LOG = LoggerFactory.getLogger(TestIPC.class); @@ -126,7 +130,7 @@ public class TestIPC { static boolean WRITABLE_FAULTS_ENABLED = true; static int WRITABLE_FAULTS_SLEEP = 0; - @Before + @BeforeEach public void setupConf() { conf = new Configuration(); Client.setPingInterval(conf, PING_INTERVAL); @@ -339,7 +343,8 @@ public Object invoke(Object proxy, Method method, Object[] args) } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testSerial() throws IOException, InterruptedException { internalTestSerial(3, false, 2, 5, 100); internalTestSerial(3, true, 2, 5, 10); @@ -403,7 +408,8 @@ public void testAuxiliaryPorts() throws IOException, InterruptedException { server.stop(); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testStandAloneClient() throws IOException { Client client = new Client(LongWritable.class, conf); InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10); @@ -413,13 +419,13 @@ public void testStandAloneClient() throws IOException { } catch (IOException e) { String message = e.getMessage(); String addressText = address.getHostName() + ":" + address.getPort(); - assertTrue("Did not find "+addressText+" in "+message, - message.contains(addressText)); + assertTrue(message.contains(addressText), + "Did not find "+addressText+" in "+message); Throwable cause=e.getCause(); - assertNotNull("No nested exception in "+e,cause); + assertNotNull(cause, "No nested exception in "+e); String causeText=cause.getMessage(); - assertTrue("Did not find " + causeText + " in " + message, - message.contains(causeText)); + assertTrue(message.contains(causeText), + "Did not find " + causeText + " in " + message); } finally { client.stop(); } @@ -539,7 +545,8 @@ private void doErrorTest( } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIOEOnClientWriteParam() throws Exception { doErrorTest(IOEOnWriteWritable.class, LongWritable.class, @@ -547,7 +554,8 @@ public void testIOEOnClientWriteParam() throws Exception { LongWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testRTEOnClientWriteParam() throws Exception { doErrorTest(RTEOnWriteWritable.class, LongWritable.class, @@ -555,7 +563,8 @@ public void testRTEOnClientWriteParam() throws Exception { LongWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIOEOnServerReadParam() throws Exception { doErrorTest(LongWritable.class, IOEOnReadWritable.class, @@ -563,7 +572,8 @@ public void testIOEOnServerReadParam() throws Exception { LongWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testRTEOnServerReadParam() throws Exception { doErrorTest(LongWritable.class, RTEOnReadWritable.class, @@ -572,7 +582,8 @@ public void testRTEOnServerReadParam() throws Exception { } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIOEOnServerWriteResponse() throws Exception { 
doErrorTest(LongWritable.class, LongWritable.class, @@ -580,7 +591,8 @@ public void testIOEOnServerWriteResponse() throws Exception { LongWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testRTEOnServerWriteResponse() throws Exception { doErrorTest(LongWritable.class, LongWritable.class, @@ -588,7 +600,8 @@ public void testRTEOnServerWriteResponse() throws Exception { LongWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIOEOnClientReadResponse() throws Exception { doErrorTest(LongWritable.class, LongWritable.class, @@ -596,7 +609,8 @@ public void testIOEOnClientReadResponse() throws Exception { IOEOnReadWritable.class); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testRTEOnClientReadResponse() throws Exception { doErrorTest(LongWritable.class, LongWritable.class, @@ -609,7 +623,8 @@ public void testRTEOnClientReadResponse() throws Exception { * that a ping should have been sent. This is a reproducer for a * deadlock seen in one iteration of HADOOP-6762. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIOEOnWriteAfterPingClient() throws Exception { // start server Client.setPingInterval(conf, 100); @@ -628,8 +643,8 @@ public void testIOEOnWriteAfterPingClient() throws Exception { private static void assertExceptionContains( Throwable t, String substring) { String msg = StringUtils.stringifyException(t); - assertTrue("Exception should contain substring '" + substring + "':\n" + - msg, msg.contains(substring)); + assertTrue(msg.contains(substring), + "Exception should contain substring '" + substring + "':\n" + msg); LOG.info("Got expected exception", t); } @@ -637,7 +652,8 @@ private static void assertExceptionContains( * Test that, if the socket factory throws an IOE, it properly propagates * to the client. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testSocketFactoryException() throws IOException { SocketFactory mockFactory = mock(SocketFactory.class); doThrow(new IOException("Injected fault")).when(mockFactory).createSocket(); @@ -670,12 +686,13 @@ public synchronized void setSoTimeout(int timeout) { * failure is handled properly. This is a regression test for * HADOOP-7428. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testRTEDuringConnectionSetup() throws IOException { // Set up a socket factory which returns sockets which // throw an RTE when setSoTimeout is called. SocketFactory spyFactory = spy(NetUtils.getDefaultSocketFactory(conf)); - Mockito.doAnswer(new Answer() { + doAnswer(new Answer() { @Override public Socket answer(InvocationOnMock invocation) { return new MockSocket(); @@ -699,7 +716,7 @@ public Socket answer(InvocationOnMock invocation) { // Resetting to the normal socket behavior should succeed // (i.e. 
it should not have cached a half-constructed connection) - Mockito.reset(spyFactory); + reset(spyFactory); call(client, RANDOM.nextLong(), address, conf); } finally { client.stop(); @@ -707,7 +724,8 @@ public Socket answer(InvocationOnMock invocation) { } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcTimeout() throws IOException { // start server Server server = new TestServer(1, true); @@ -730,7 +748,8 @@ public void testIpcTimeout() throws IOException { client.stop(); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcConnectTimeout() throws IOException { // start server Server server = new TestServer(1, true); @@ -754,7 +773,8 @@ public void testIpcConnectTimeout() throws IOException { /** * Check service class byte in IPC header is correct on wire. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcWithServiceClass() throws IOException { // start server Server server = new TestServer(5, false); @@ -800,7 +820,8 @@ public Writable call(RPC.RpcKind rpcKind, String protocol, Writable param, } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcHostResolutionTimeout() throws Exception { final InetSocketAddress addr = new InetSocketAddress("host.invalid", 80); @@ -898,7 +919,8 @@ public void testStableHashCode() throws IOException { } } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcFlakyHostResolution() throws IOException { // start server Server server = new TestServer(5, false); @@ -929,7 +951,8 @@ public void testIpcFlakyHostResolution() throws IOException { * @throws BrokenBarrierException * @throws InterruptedException */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcWithReaderQueuing() throws Exception { // 1 reader, 1 connectionQ slot, 1 callq for (int i=0; i < 10; i++) { @@ -1058,7 +1081,8 @@ public void run() { server.stop(); } - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testConnectionIdleTimeouts() throws Exception { GenericTestUtils.setLogLevel(Server.LOG, Level.DEBUG); final int maxIdle = 1000; @@ -1176,37 +1200,40 @@ private static void callAndVerify(Server server, InetSocketAddress addr, call(client, addr, serviceClass, conf); Connection connection = server.getConnections()[0]; LOG.info("Connection is from: {}", connection); - assertEquals( - "Connection string representation should include only IP address for healthy connection", 1, - connection.toString().split(" / ").length); + assertEquals(1, connection.toString().split(" / ").length, + "Connection string representation should include only IP address for healthy connection"); int serviceClass2 = connection.getServiceClass(); assertFalse(noChanged ^ serviceClass == serviceClass2); client.stop(); } - - @Test(timeout=30000, expected=IOException.class) + + @Test + @Timeout(value = 30) public void testIpcAfterStopping() throws IOException { - // start server - Server server = new TestServer(5, false); - InetSocketAddress addr = NetUtils.getConnectAddress(server); - server.start(); + assertThrows(IOException.class, () -> { + // start server + Server server = new TestServer(5, false); + InetSocketAddress addr = NetUtils.getConnectAddress(server); + server.start(); - // start client - Client client = new Client(LongWritable.class, conf); - call(client, addr, 0, conf); - client.stop(); - - // This call should throw IOException. 
- call(client, addr, 0, conf); + // start client + Client client = new Client(LongWritable.class, conf); + call(client, addr, 0, conf); + client.stop(); + + // This call should throw IOException. + call(client, addr, 0, conf); + }); } /** * Check that file descriptors aren't leaked by starting * and stopping IPC servers. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testSocketLeak() throws IOException { - Assume.assumeTrue(FD_DIR.exists()); // only run on Linux + assumeTrue(FD_DIR.exists()); // only run on Linux long startFds = countOpenFileDescriptors(); for (int i = 0; i < 50; i++) { @@ -1216,15 +1243,16 @@ public void testSocketLeak() throws IOException { } long endFds = countOpenFileDescriptors(); - assertTrue("Leaked " + (endFds - startFds) + " file descriptors", - endFds - startFds < 20); + assertTrue(endFds - startFds < 20, + "Leaked " + (endFds - startFds) + " file descriptors"); } /** * Check if Client is interrupted after handling * InterruptedException during cleanup */ - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testInterrupted() { Client client = new Client(LongWritable.class, conf); Thread.currentThread().interrupt(); @@ -1234,7 +1262,7 @@ public void testInterrupted() { LOG.info("Expected thread interrupt during client cleanup"); } catch (AssertionError e) { LOG.error("The Client did not interrupt after handling an Interrupted Exception"); - Assert.fail("The Client did not interrupt after handling an Interrupted Exception"); + fail("The Client did not interrupt after handling an Interrupted Exception"); } // Clear Thread interrupt Thread.interrupted(); @@ -1244,31 +1272,36 @@ private long countOpenFileDescriptors() { return FD_DIR.list().length; } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcFromHadoop_0_18_13() throws IOException { doIpcVersionTest(NetworkTraces.HADOOP_0_18_3_RPC_DUMP, NetworkTraces.RESPONSE_TO_HADOOP_0_18_3_RPC); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcFromHadoop0_20_3() throws IOException { doIpcVersionTest(NetworkTraces.HADOOP_0_20_3_RPC_DUMP, NetworkTraces.RESPONSE_TO_HADOOP_0_20_3_RPC); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testIpcFromHadoop0_21_0() throws IOException { doIpcVersionTest(NetworkTraces.HADOOP_0_21_0_RPC_DUMP, NetworkTraces.RESPONSE_TO_HADOOP_0_21_0_RPC); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testHttpGetResponse() throws IOException { doIpcVersionTest("GET / HTTP/1.0\r\n\r\n".getBytes(), Server.RECEIVED_HTTP_REQ_RESPONSE.getBytes()); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testConnectionRetriesOnSocketTimeoutExceptions() throws IOException { Configuration conf = new Configuration(); // set max retries to 0 @@ -1294,7 +1327,8 @@ static class CallInfo { * (1) the rpc server uses the call id/retry provided by the rpc client, and * (2) the rpc client receives the same call id/retry from the rpc server. 
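Where the old annotations declared the failure up front with @Test(expected = ...), the converted tests above (testIpcAfterStopping, testNewBuilder, the DecayRpcScheduler constructors) wrap only the failing statements in assertThrows, which scopes the expectation and returns the exception for further checks. A minimal, self-contained sketch of the idiom, with an illustrative method name:

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;
    import org.junit.jupiter.api.Test;

    public class AssertThrowsSketch {
      // JUnit 4: @Test(expected = NumberFormatException.class)
      @Test
      public void rejectsBadInput() {
        NumberFormatException e = assertThrows(NumberFormatException.class,
            () -> Integer.parseInt("not-a-number"));
        // Unlike the annotation form, the thrown exception can be inspected.
        assertTrue(e.getMessage().contains("not-a-number"));
      }
    }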
*/ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testCallIdAndRetry() throws IOException { final CallInfo info = new CallInfo(); @@ -1311,8 +1345,8 @@ Call createCall(RpcKind rpcKind, Writable rpcRequest) { @Override void checkResponse(RpcResponseHeaderProto header) throws IOException { super.checkResponse(header); - Assert.assertEquals(info.id, header.getCallId()); - Assert.assertEquals(info.retry, header.getRetryCount()); + assertEquals(info.id, header.getCallId()); + assertEquals(info.retry, header.getRetryCount()); } }; @@ -1321,8 +1355,8 @@ void checkResponse(RpcResponseHeaderProto header) throws IOException { server.callListener = new Runnable() { @Override public void run() { - Assert.assertEquals(info.id, Server.getCallId()); - Assert.assertEquals(info.retry, Server.getCallRetryCount()); + assertEquals(info.id, Server.getCallId()); + assertEquals(info.retry, Server.getCallRetryCount()); } }; @@ -1343,11 +1377,12 @@ public void run() { * caller is notified. * @throws IOException */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testReceiveStateBeforeCallerNotification() throws IOException { AtomicBoolean stateReceived = new AtomicBoolean(false); - AlignmentContext alignmentContext = Mockito.mock(AlignmentContext.class); - Mockito.doAnswer((Answer) invocation -> { + AlignmentContext alignmentContext = mock(AlignmentContext.class); + doAnswer((Answer) invocation -> { Thread.sleep(1000); stateReceived.set(true); return null; @@ -1362,7 +1397,7 @@ public void testReceiveStateBeforeCallerNotification() throws IOException { server.start(); call(client, new LongWritable(RANDOM.nextLong()), addr, 0, conf, alignmentContext); - Assert.assertTrue(stateReceived.get()); + assertTrue(stateReceived.get()); } finally { client.stop(); server.stop(); @@ -1378,7 +1413,8 @@ interface DummyProtocol { /** * Test the retry count while used in a retry proxy. */ - @Test(timeout=100000) + @Test + @Timeout(value = 100) public void testRetryProxy() throws IOException { final Client client = new Client(LongWritable.class, conf); @@ -1387,7 +1423,7 @@ public void testRetryProxy() throws IOException { private int retryCount = 0; @Override public void run() { - Assert.assertEquals(retryCount++, Server.getCallRetryCount()); + assertEquals(retryCount++, Server.getCallRetryCount()); } }; @@ -1404,7 +1440,7 @@ public void run() { try { server.start(); retryProxy.dummyRun(); - Assert.assertEquals(TestInvocationHandler.retry, totalRetry + 1); + assertEquals(TestInvocationHandler.retry, totalRetry + 1); } finally { Client.setCallIdAndRetryCount(0, 0, null); client.stop(); @@ -1416,39 +1452,41 @@ public void run() { * Test that there is no retry when invalid token exception is thrown. 
* Verfies fix for HADOOP-12054 */ - @Test(expected = InvalidToken.class) + @Test public void testNoRetryOnInvalidToken() throws IOException { - final Client client = new Client(LongWritable.class, conf); - final TestServer server = new TestServer(1, false); - TestInvalidTokenHandler handler = - new TestInvalidTokenHandler(client, server); - DummyProtocol proxy = (DummyProtocol) Proxy.newProxyInstance( - DummyProtocol.class.getClassLoader(), - new Class[] { DummyProtocol.class }, handler); - FailoverProxyProvider provider = - new DefaultFailoverProxyProvider( - DummyProtocol.class, proxy); - DummyProtocol retryProxy = - (DummyProtocol) RetryProxy.create(DummyProtocol.class, provider, - RetryPolicies.failoverOnNetworkException( - RetryPolicies.TRY_ONCE_THEN_FAIL, 100, 100, 10000, 0)); + assertThrows(InvalidToken.class, () -> { + final Client client = new Client(LongWritable.class, conf); + final TestServer server = new TestServer(1, false); + TestInvalidTokenHandler handler = + new TestInvalidTokenHandler(client, server); + DummyProtocol proxy = (DummyProtocol) Proxy.newProxyInstance( + DummyProtocol.class.getClassLoader(), + new Class[]{DummyProtocol.class}, handler); + FailoverProxyProvider provider = + new DefaultFailoverProxyProvider<>(DummyProtocol.class, proxy); + DummyProtocol retryProxy = + (DummyProtocol) RetryProxy.create(DummyProtocol.class, provider, + RetryPolicies.failoverOnNetworkException( + RetryPolicies.TRY_ONCE_THEN_FAIL, 100, 100, 10000, 0)); - try { - server.start(); - retryProxy.dummyRun(); - } finally { - // Check if dummyRun called only once - assertThat(handler.invocations).isOne(); - Client.setCallIdAndRetryCount(0, 0, null); - client.stop(); - server.stop(); - } + try { + server.start(); + retryProxy.dummyRun(); + } finally { + // Check if dummyRun called only once + assertThat(handler.invocations).isOne(); + Client.setCallIdAndRetryCount(0, 0, null); + client.stop(); + server.stop(); + } + }); } /** * Test if the rpc server gets the default retry count (0) from client. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testInitialCallRetryCount() throws IOException { // Override client to store the call id final Client client = new Client(LongWritable.class, conf); @@ -1460,7 +1498,7 @@ public void testInitialCallRetryCount() throws IOException { public void run() { // we have not set the retry count for the client, thus on the server // side we should see retry count as 0 - Assert.assertEquals(0, Server.getCallRetryCount()); + assertEquals(0, Server.getCallRetryCount()); } }; @@ -1479,7 +1517,8 @@ public void run() { /** * Test if the rpc server gets the retry count from client. */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testCallRetryCount() throws IOException { final int retryCount = 255; // Override client to store the call id @@ -1493,7 +1532,7 @@ public void testCallRetryCount() throws IOException { public void run() { // we have not set the retry count for the client, thus on the server // side we should see retry count as 0 - Assert.assertEquals(retryCount, Server.getCallRetryCount()); + assertEquals(retryCount, Server.getCallRetryCount()); } }; @@ -1514,7 +1553,8 @@ public void run() { * even if multiple threads are using the same client. 
* @throws InterruptedException */ - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testUniqueSequentialCallIds() throws IOException, InterruptedException { int serverThreads = 10, callerCount = 100, perCallerCallCount = 100; @@ -1623,10 +1663,11 @@ public void testClientGetTimeout() throws IOException { assertThat(Client.getTimeout(config)).isEqualTo(-1); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testSetupConnectionShouldNotBlockShutdown() throws Exception { // Start server - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); + SocketFactory mockFactory = mock(SocketFactory.class); Server server = new TestServer(1, true); final InetSocketAddress addr = NetUtils.getConnectAddress(server); @@ -1669,7 +1710,7 @@ public Boolean get() { private void assertRetriesOnSocketTimeouts(Configuration conf, int maxTimeoutRetries) throws IOException { - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); + SocketFactory mockFactory = mock(SocketFactory.class); doThrow(new ConnectTimeoutException("fake")).when(mockFactory).createSocket(); Client client = new Client(LongWritable.class, conf, mockFactory); InetSocketAddress address = new InetSocketAddress("127.0.0.1", 9090); @@ -1677,18 +1718,20 @@ private void assertRetriesOnSocketTimeouts(Configuration conf, call(client, RANDOM.nextLong(), address, conf); fail("Not throwing the SocketTimeoutException"); } catch (SocketTimeoutException e) { - Mockito.verify(mockFactory, Mockito.times(maxTimeoutRetries)) + verify(mockFactory, times(maxTimeoutRetries)) .createSocket(); } client.stop(); } - @Test(timeout=4000) + @Test + @Timeout(value = 4) public void testInsecureVersionMismatch() throws IOException { checkVersionMismatch(); } - @Test(timeout=4000) + @Test + @Timeout(value = 4) public void testSecureVersionMismatch() throws IOException { SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf); UserGroupInformation.setConfiguration(conf); @@ -1722,13 +1765,13 @@ public void run() { Client client = new Client(LongWritable.class, conf); call(client, 0, addr, conf); } catch (RemoteException re) { - Assert.assertEquals(RPC.VersionMismatch.class.getName(), + assertEquals(RPC.VersionMismatch.class.getName(), re.getClassName()); - Assert.assertEquals(NetworkTraces.HADOOP0_20_ERROR_MSG, + assertEquals(NetworkTraces.HADOOP0_20_ERROR_MSG, re.getMessage()); return; } - Assert.fail("didn't get version mismatch"); + fail("didn't get version mismatch"); } } @@ -1747,13 +1790,13 @@ public void testRpcResponseLimit() throws Throwable { try { call(client, 0, addr, conf); } catch (IOException ioe) { - Assert.assertNotNull(ioe); - Assert.assertEquals(RpcException.class, ioe.getClass()); - Assert.assertTrue(ioe.getMessage().contains( + assertNotNull(ioe); + assertEquals(RpcException.class, ioe.getClass()); + assertTrue(ioe.getMessage().contains( "exceeds maximum data length")); return; } - Assert.fail("didn't get limit exceeded"); + fail("didn't get limit exceeded"); } @Test @@ -1766,13 +1809,14 @@ public void testProxyUserBinding() throws Exception { checkUserBinding(true); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testUpdateAddressEnsureResolved() throws Exception { // start server Server server = new TestServer(1, false); server.start(); - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); + SocketFactory mockFactory = mock(SocketFactory.class); doThrow(new ConnectTimeoutException("fake")).when(mockFactory) .createSocket(); Client client = new 
Client(LongWritable.class, conf, mockFactory); @@ -1811,16 +1855,16 @@ private void checkUserBinding(boolean asProxy) throws Exception { Socket s; // don't attempt bind with no service host. s = checkConnect(null, asProxy); - Mockito.verify(s, Mockito.never()).bind(any(SocketAddress.class)); + verify(s, never()).bind(any(SocketAddress.class)); // don't attempt bind with service host not belonging to this host. s = checkConnect("1.2.3.4", asProxy); - Mockito.verify(s, Mockito.never()).bind(any(SocketAddress.class)); + verify(s, never()).bind(any(SocketAddress.class)); // do attempt bind when service host is this host. InetAddress addr = InetAddress.getLocalHost(); s = checkConnect(addr.getHostAddress(), asProxy); - Mockito.verify(s).bind(new InetSocketAddress(addr, 0)); + verify(s).bind(new InetSocketAddress(addr, 0)); } // dummy protocol that claims to support kerberos. @@ -1838,7 +1882,7 @@ private Socket checkConnect(String addr, boolean asProxy) throws Exception { principal.append("@REALM"); UserGroupInformation ugi = spy(UserGroupInformation.createRemoteUser(principal.toString())); - Mockito.doReturn(true).when(ugi).hasKerberosCredentials(); + doReturn(true).when(ugi).hasKerberosCredentials(); if (asProxy) { ugi = UserGroupInformation.createProxyUser("proxy", ugi); } @@ -1846,11 +1890,11 @@ private Socket checkConnect(String addr, boolean asProxy) throws Exception { // create a mock socket that throws on connect. SocketException expectedConnectEx = new SocketException("Expected connect failure"); - Socket s = Mockito.mock(Socket.class); - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); - Mockito.doReturn(s).when(mockFactory).createSocket(); + Socket s = mock(Socket.class); + SocketFactory mockFactory = mock(SocketFactory.class); + doReturn(s).when(mockFactory).createSocket(); doThrow(expectedConnectEx).when(s).connect( - any(SocketAddress.class), Mockito.anyInt()); + any(SocketAddress.class), anyInt()); // do a dummy call and expect it to throw an exception on connect. // tests should verify if/how a bind occurred. @@ -1864,7 +1908,7 @@ private Socket checkConnect(String addr, boolean asProxy) throws Exception { fail("call didn't throw connect exception"); } catch (SocketException se) { // ipc layer re-wraps exceptions, so check the cause. 
- Assert.assertSame(expectedConnectEx, se.getCause()); + assertSame(expectedConnectEx, se.getCause()); } return s; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java index 7d7905e6b4674..fa9a1606539ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java @@ -18,7 +18,10 @@ package org.apache.hadoop.ipc; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.net.InetSocketAddress; @@ -41,8 +44,8 @@ import org.apache.hadoop.ipc.RPC.RpcKind; import org.apache.hadoop.ipc.Server.Call; import org.apache.hadoop.net.NetUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -189,7 +192,8 @@ public void checkServerResponder(final int handlerCount, // call 4: sendResponse, should remain blocked // call 5: immediate, prove handler is still free // call 4: sendResponse, expect it to return - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testDeferResponse() throws IOException, InterruptedException { final AtomicReference deferredCall = new AtomicReference(); final AtomicInteger count = new AtomicInteger(); @@ -234,11 +238,11 @@ public Integer call() throws IOException { // make sure it blocked try { future1.get(1, TimeUnit.SECONDS); - Assert.fail("ipc shouldn't have responded"); + fail("ipc shouldn't have responded"); } catch (TimeoutException te) { // ignore, expected } catch (Exception ex) { - Assert.fail("unexpected exception:"+ex); + fail("unexpected exception:"+ex); } assertFalse(future1.isDone()); waitingCalls[0] = deferredCall.get(); @@ -259,11 +263,11 @@ public Integer call() throws IOException { // make sure it blocked try { future2.get(1, TimeUnit.SECONDS); - Assert.fail("ipc shouldn't have responded"); + fail("ipc shouldn't have responded"); } catch (TimeoutException te) { // ignore, expected } catch (Exception ex) { - Assert.fail("unexpected exception:"+ex); + fail("unexpected exception:"+ex); } assertFalse(future2.isDone()); waitingCalls[1] = deferredCall.get(); @@ -280,17 +284,17 @@ public Integer call() throws IOException { int val = future1.get(1, TimeUnit.SECONDS); assertEquals(2, val); } catch (Exception ex) { - Assert.fail("unexpected exception:"+ex); + fail("unexpected exception:"+ex); } // make sure it's still blocked try { future2.get(1, TimeUnit.SECONDS); - Assert.fail("ipc shouldn't have responded"); + fail("ipc shouldn't have responded"); } catch (TimeoutException te) { // ignore, expected } catch (Exception ex) { - Assert.fail("unexpected exception:"+ex); + fail("unexpected exception:"+ex); } assertFalse(future2.isDone()); @@ -303,7 +307,7 @@ public Integer call() throws IOException { int val = future2.get(1, TimeUnit.SECONDS); assertEquals(4, val); } catch (Exception ex) { - Assert.fail("unexpected exception:"+ex); + fail("unexpected exception:"+ex); } server.stop(); diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIdentityProviders.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIdentityProviders.java index b528186ad26a5..396014aa46874 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIdentityProviders.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIdentityProviders.java @@ -18,12 +18,12 @@ package org.apache.hadoop.ipc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.assertj.core.api.Assertions.assertThat; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.List; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMiniRPCBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMiniRPCBenchmark.java index a130fa9757a92..dd63b8b5d3766 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMiniRPCBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMiniRPCBenchmark.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ipc; import org.apache.hadoop.conf.Configuration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.event.Level; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java index c1b0858697682..34ce1b2c30e36 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java @@ -18,22 +18,22 @@ package org.apache.hadoop.ipc; import org.apache.hadoop.conf.Configuration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestMultipleProtocolServer extends TestRpcBase { private static RPC.Server server; - @Before + @BeforeEach public void setUp() throws Exception { super.setupConf(); server = setupTestServer(conf, 2); } - @After + @AfterEach public void tearDown() throws Exception { server.stop(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProcessingDetails.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProcessingDetails.java index 0ecc741b014b3..9858f6f090754 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProcessingDetails.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProcessingDetails.java @@ -18,12 +18,12 @@ package org.apache.hadoop.ipc; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.TimeUnit; import static org.apache.hadoop.ipc.ProcessingDetails.Timing; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Unit tests for ProcessingDetails time unit conversion and output. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java index d813c6b784f5d..4aa21890e3ed3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.ipc; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.net.InetSocketAddress; @@ -34,8 +36,7 @@ import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewProtobufRpcProto; import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewerProtobufRpcProto; import org.apache.hadoop.net.NetUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.protobuf.BlockingService; import org.apache.hadoop.thirdparty.protobuf.RpcController; @@ -71,8 +72,8 @@ public EmptyResponseProto ping(RpcController unused, EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(Server.getClientId()); - Assert.assertEquals(16, clientId.length); + assertNotNull(Server.getClientId()); + assertEquals(16, clientId.length); return EmptyResponseProto.newBuilder().build(); } @@ -81,8 +82,8 @@ public EmptyResponseProto echo(RpcController unused, EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(Server.getClientId()); - Assert.assertEquals(16, clientId.length); + assertNotNull(Server.getClientId()); + assertEquals(16, clientId.length); return EmptyResponseProto.newBuilder().build(); } } @@ -94,8 +95,8 @@ public EmptyResponseProto ping(RpcController unused, EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(Server.getClientId()); - Assert.assertEquals(16, clientId.length); + assertNotNull(Server.getClientId()); + assertEquals(16, clientId.length); return EmptyResponseProto.newBuilder().build(); } @@ -115,8 +116,8 @@ public EmptyResponseProto ping(RpcController unused, EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(Server.getClientId()); - Assert.assertEquals(16, clientId.length); + assertNotNull(Server.getClientId()); + assertEquals(16, clientId.length); return EmptyResponseProto.newBuilder().build(); } @@ -125,8 +126,8 @@ public EmptyResponseProto echo(RpcController unused, EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(Server.getClientId()); - Assert.assertEquals(16, clientId.length); + assertNotNull(Server.getClientId()); + assertEquals(16, clientId.length); return EmptyResponseProto.newBuilder().build(); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java index a9eaccb3bf3df..2130b78b5e147 
100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java @@ -38,12 +38,10 @@ import org.apache.hadoop.thirdparty.protobuf.BlockingService; import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.io.IOException; import java.net.URISyntaxException; @@ -54,7 +52,7 @@ import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import static org.junit.Assume.assumeFalse; /** @@ -62,7 +60,6 @@ * This test depends on test.proto definition of types in src/test/proto * and protobuf service definition from src/test/test_rpc_service.proto */ -@RunWith(Parameterized.class) public class TestProtoBufRpc extends TestRpcBase { private static RPC.Server server; private final static int SLEEP_DURATION = 1000; @@ -77,9 +74,11 @@ public class TestProtoBufRpc extends TestRpcBase { */ private boolean testWithLegacyFirst; - public TestProtoBufRpc(Boolean testWithLegacy, Boolean testWithLegacyFirst) { - this.testWithLegacy = testWithLegacy; - this.testWithLegacyFirst = testWithLegacyFirst; + public void initTestProtoBufRpc(Boolean pTestWithLegacy, Boolean pTestWithLegacyFirst) + throws IOException { + this.testWithLegacy = pTestWithLegacy; + this.testWithLegacyFirst = pTestWithLegacyFirst; + setUp(); } @ProtocolInfo(protocolName = "testProto2", protocolVersion = 1) @@ -151,7 +150,6 @@ public TestProtosLegacy.SleepResponseProto sleep( } } - @Parameters public static Collection params() { Collection params = new ArrayList(); params.add(new Object[] {Boolean.TRUE, Boolean.TRUE }); @@ -160,7 +158,6 @@ public static Collection params() { return params; } - @Before @SuppressWarnings("deprecation") public void setUp() throws IOException { // Setup server for both protocols conf = new Configuration(); @@ -218,7 +215,7 @@ public void setUp() throws IOException { // Setup server for both protocols } - @After + @AfterEach public void tearDown() throws Exception { server.stop(); } @@ -231,8 +228,12 @@ private TestRpcService2Legacy getClientLegacy() throws IOException { return RPC.getProxy(TestRpcService2Legacy.class, 0, addr, conf); } - @Test (timeout=5000) - public void testProtoBufRpc() throws Exception { + @ParameterizedTest + @Timeout(value = 5) + @MethodSource("params") + public void testProtoBufRpc(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws Exception { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); TestRpcService client = getClient(addr, conf); testProtoBufRpc(client); } @@ -261,9 +262,13 @@ public static void testProtoBufRpc(TestRpcService client) throws Exception { .isEqualTo(RpcErrorCodeProto.ERROR_RPC_SERVER); } } - - @Test (timeout=5000) - public void testProtoBufRpc2() throws Exception { + + @ParameterizedTest + // @Timeout(value = 5) + 
@MethodSource("params") + public void testProtoBufRpc2(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws Exception { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); TestRpcService2 client = getClient2(); // Test ping method @@ -311,8 +316,12 @@ private void testProtobufLegacy() assertCounterGt("Echo2NumOps", 0L, rpcDetailedMetrics); } - @Test (timeout=5000) - public void testProtoBufRandomException() throws Exception { + @ParameterizedTest + @Timeout(value = 5) + @MethodSource("params") + public void testProtoBufRandomException(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws Exception { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); //No test with legacy assumeFalse(testWithLegacy); TestRpcService client = getClient(addr, conf); @@ -329,9 +338,13 @@ public void testProtoBufRandomException() throws Exception { .isEqualTo(RpcErrorCodeProto.ERROR_APPLICATION); } } - - @Test(timeout=6000) - public void testExtraLongRpc() throws Exception { + + @ParameterizedTest + @Timeout(value = 6) + @MethodSource("params") + public void testExtraLongRpc(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws Exception { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); //No test with legacy assumeFalse(testWithLegacy); TestRpcService2 client = getClient2(); @@ -350,9 +363,13 @@ public void testExtraLongRpc() throws Exception { } } - @Test(timeout = 12000) - public void testLogSlowRPC() throws IOException, ServiceException, + @ParameterizedTest + @Timeout(value = 12) + @MethodSource("params") + public void testLogSlowRPC(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws IOException, ServiceException, TimeoutException, InterruptedException { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); //No test with legacy assumeFalse(testWithLegacy); server.setLogSlowRPCThresholdTime(SLEEP_DURATION); @@ -385,8 +402,12 @@ public void testLogSlowRPC() throws IOException, ServiceException, -> rpcMetrics.getRpcSlowCalls() == before + 1L, 10, 1000); } - @Test(timeout = 12000) - public void testEnsureNoLogIfDisabled() throws IOException, ServiceException { + @ParameterizedTest + @Timeout(value = 12) + @MethodSource("params") + public void testEnsureNoLogIfDisabled(boolean pTestWithLegacy, + boolean pTestWithLegacyFirst) throws IOException, ServiceException { + initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst); //No test with legacy assumeFalse(testWithLegacy); // disable slow RPC logging diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java index 0ae2d37d1ad1f..d7d390dc6d90e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java @@ -34,15 +34,16 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.protobuf.TestProtos; import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcHandoffProto; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static 
org.apache.hadoop.test.MetricsAsserts.assertCounterGt; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestProtoBufRpcServerHandoff { @@ -53,7 +54,7 @@ public class TestProtoBufRpcServerHandoff { private static RPC.Server server = null; private static InetSocketAddress address = null; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); @@ -77,7 +78,8 @@ public void setUp() throws IOException { LOG.info("Server started at: " + address + " at time: " + serverStartTime); } - @Test(timeout = 20000) + @Test + @Timeout(value = 20) public void test() throws Exception { final TestProtoBufRpcServerHandoffProtocol client = RPC.getProxy( TestProtoBufRpcServerHandoffProtocol.class, 1, address, conf); @@ -102,12 +104,13 @@ public void test() throws Exception { // Ensure the 5 second sleep responses are within a reasonable time of each // other. - Assert.assertTrue(Math.abs(callable1.endTime - callable2.endTime) < 2000l); - Assert.assertTrue(System.currentTimeMillis() - submitTime < 7000l); + assertTrue(Math.abs(callable1.endTime - callable2.endTime) < 2000L); + assertTrue(System.currentTimeMillis() - submitTime < 7000L); } - @Test(timeout = 20000) + @Test + @Timeout(value = 20) public void testHandoffMetrics() throws Exception { final TestProtoBufRpcServerHandoffProtocol client = RPC.getProxy( TestProtoBufRpcServerHandoffProtocol.class, 1, address, conf); @@ -132,8 +135,8 @@ public void testHandoffMetrics() throws Exception { // Ensure the 5 second sleep responses are within a reasonable time of each // other. - Assert.assertTrue(Math.abs(callable1.endTime - callable2.endTime) < 2000L); - Assert.assertTrue(System.currentTimeMillis() - submitTime < 7000L); + assertTrue(Math.abs(callable1.endTime - callable2.endTime) < 2000L); + assertTrue(System.currentTimeMillis() - submitTime < 7000L); // Check rpcMetrics MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java index bc72b6c126275..645432acdc50e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java @@ -53,10 +53,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.MetricsAsserts; import org.apache.hadoop.test.MockitoUtil; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; @@ -106,16 +105,19 @@ import static org.apache.hadoop.test.MetricsAsserts.getDoubleGauge; import static org.apache.hadoop.test.MetricsAsserts.getLongCounter; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; /** Unit tests for RPC. */ @SuppressWarnings("deprecation") @@ -123,7 +125,7 @@ public class TestRPC extends TestRpcBase { public static final Logger LOG = LoggerFactory.getLogger(TestRPC.class); - @Before + @BeforeEach public void setup() { setupConf(); } @@ -232,7 +234,7 @@ public void run() { addResponse = proxy.add(null, addRequest); val = addResponse.getResult(); } catch (ServiceException e) { - assertTrue("Exception from RPC exchange() " + e, false); + assertTrue(false, "Exception from RPC exchange() " + e); } assertEquals(indata.length, outdata.length); assertEquals(3, val); @@ -265,7 +267,7 @@ public void run() { ping(true); done = true; } catch (ServiceException e) { - assertTrue("SlowRPC ping exception " + e, false); + assertTrue(false, "SlowRPC ping exception " + e); } } @@ -471,12 +473,12 @@ public void testSlowRpc() throws IOException, ServiceException { SlowRPC slowrpc = new SlowRPC(proxy); Thread thread = new Thread(slowrpc, "SlowRPC"); thread.start(); // send a slow RPC, which won't return until two fast pings - assertTrue("Slow RPC should not have finished1.", !slowrpc.isDone()); + assertTrue(!slowrpc.isDone(), "Slow RPC should not have finished1."); slowrpc.ping(false); // first fast ping // verify that the first RPC is still stuck - assertTrue("Slow RPC should not have finished2.", !slowrpc.isDone()); + assertTrue(!slowrpc.isDone(), "Slow RPC should not have finished2."); slowrpc.ping(false); // second fast ping @@ -658,8 +660,8 @@ private void doRPCs(Configuration myConf, boolean expectFailure) throws Exceptio if (expectFailure) { RemoteException re = (RemoteException) e.getCause(); assertTrue(re.unwrapRemoteException() instanceof AuthorizationException); - assertEquals("RPC error code should be UNAUTHORIZED", - RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode()); + assertEquals(RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode(), + "RPC error code should be UNAUTHORIZED"); } else { throw e; } @@ -734,9 +736,11 @@ public void testNoPings() throws Exception { * Test stopping a non-registered proxy * @throws IOException */ - @Test(expected=HadoopIllegalArgumentException.class) + @Test public void testStopNonRegisteredProxy() throws IOException { - RPC.stopProxy(null); + assertThrows(HadoopIllegalArgumentException.class, () -> { + RPC.stopProxy(null); + }); } /** @@ -799,8 +803,8 @@ public void testErrorMsgForInsecureClient() throws IOException { assertTrue(e.getCause() instanceof RemoteException); RemoteException re = (RemoteException) e.getCause(); LOG.info("LOGGING MESSAGE: " + re.getLocalizedMessage()); - assertEquals("RPC error code should be UNAUTHORIZED", - RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode()); + assertEquals(RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode(), + "RPC error code should be UNAUTHORIZED"); assertTrue(re.unwrapRemoteException() instanceof AccessControlException); succeeded = true; } finally { @@ -821,8 +825,8 @@ public void testErrorMsgForInsecureClient() throws 
IOException { } catch (ServiceException e) { RemoteException re = (RemoteException) e.getCause(); LOG.info("LOGGING MESSAGE: " + re.getLocalizedMessage()); - assertEquals("RPC error code should be UNAUTHORIZED", - RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode()); + assertEquals(RpcErrorCodeProto.FATAL_UNAUTHORIZED, re.getErrorCode(), + "RPC error code should be UNAUTHORIZED"); assertTrue(re.unwrapRemoteException() instanceof AccessControlException); succeeded = true; } finally { @@ -839,8 +843,8 @@ public void testStopsAllThreads() throws IOException, InterruptedException { Server server; int threadsBefore = countThreads("Server$Listener$Reader"); - assertEquals("Expect no Reader threads running before test", - 0, threadsBefore); + assertEquals(0, threadsBefore, + "Expect no Reader threads running before test"); server = setupTestServer(conf, 5); @@ -862,8 +866,8 @@ public void testStopsAllThreads() throws IOException, InterruptedException { } int threadsAfter = countThreads("Server$Listener$Reader"); - assertEquals("Expect no Reader threads left running after test", - 0, threadsAfter); + assertEquals(0, threadsAfter, + "Expect no Reader threads left running after test"); } @Test @@ -902,7 +906,8 @@ public void testRPCBuilder() throws IOException { } } - @Test(timeout=90000) + @Test + @Timeout(value = 90) public void testRPCInterruptedSimple() throws Exception { Server server; TestRpcService proxy = null; @@ -937,7 +942,8 @@ public void testRPCInterruptedSimple() throws Exception { } } - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testRPCInterrupted() throws Exception { Server server; @@ -996,7 +1002,7 @@ public void run() { latch.await(); // should not cause any other thread to get an error - assertTrue("rpc got exception " + error.get(), error.get() == null); + assertTrue(error.get() == null, "rpc got exception " + error.get()); } finally { server.stop(); } @@ -1010,17 +1016,18 @@ public void run() { * We use a mock SocketFactory so that we can control when the input and * output streams are frozen. 
*/ - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testSlowConnection() throws Exception { - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); - Socket mockSocket = Mockito.mock(Socket.class); - Mockito.when(mockFactory.createSocket()).thenReturn(mockSocket); - Mockito.when(mockSocket.getPort()).thenReturn(1234); - Mockito.when(mockSocket.getLocalPort()).thenReturn(2345); + SocketFactory mockFactory = mock(SocketFactory.class); + Socket mockSocket = mock(Socket.class); + when(mockFactory.createSocket()).thenReturn(mockSocket); + when(mockSocket.getPort()).thenReturn(1234); + when(mockSocket.getLocalPort()).thenReturn(2345); MockOutputStream mockOutputStream = new MockOutputStream(); - Mockito.when(mockSocket.getOutputStream()).thenReturn(mockOutputStream); + when(mockSocket.getOutputStream()).thenReturn(mockOutputStream); // Use an input stream that always blocks - Mockito.when(mockSocket.getInputStream()).thenReturn(new InputStream() { + when(mockSocket.getInputStream()).thenReturn(new InputStream() { @Override public int read() throws IOException { // wait forever @@ -1079,8 +1086,7 @@ public Void call() throws Exception { mockOutputStream.waitForWriters(); // interrupt all the threads for(int thread=0; thread < numThreads; ++thread) { - assertTrue("cancel thread " + thread, - futures[thread].cancel(true)); + assertTrue(futures[thread].cancel(true), "cancel thread " + thread); } // wait until all the writers are cancelled pool.shutdown(); @@ -1155,10 +1161,11 @@ public void waitForWriters() throws InterruptedException { * This test causes an exception in the RPC connection setup to make * sure that threads aren't leaked. */ - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testBadSetup() throws Exception { - SocketFactory mockFactory = Mockito.mock(SocketFactory.class); - Mockito.when(mockFactory.createSocket()) + SocketFactory mockFactory = mock(SocketFactory.class); + when(mockFactory.createSocket()) .thenThrow(new IOException("can't connect")); Configuration clientConf = new Configuration(); // Set an illegal value to cause an exception in the constructor @@ -1178,12 +1185,12 @@ public void testBadSetup() throws Exception { clientConf, mockFactory).getProxy(); client.ping(null, newEmptyRequest()); - assertTrue("Didn't throw exception!", false); + assertTrue(false, "Didn't throw exception!"); } catch (ServiceException nfe) { // ensure no extra threads are running. 
assertEquals(threadCount, Thread.getAllStackTraces().size()); } catch (Throwable t) { - assertTrue("wrong exception: " + t, false); + assertTrue(false, "wrong exception: " + t); } } finally { if (client != null) { @@ -1211,7 +1218,8 @@ public void testConnectionPing() throws Exception { } } - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testExternalCall() throws Exception { final UserGroupInformation ugi = UserGroupInformation .createUserForTesting("user123", new String[0]); @@ -1334,18 +1342,18 @@ public TestRpcService run() { } MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name()); - assertEquals("Expected correct rpc en queue count", - 3000, getLongCounter("RpcEnQueueTimeNumOps", rpcMetrics)); - assertEquals("Expected correct rpc queue count", - 3000, getLongCounter("RpcQueueTimeNumOps", rpcMetrics)); - assertEquals("Expected correct rpc processing count", - 3000, getLongCounter("RpcProcessingTimeNumOps", rpcMetrics)); - assertEquals("Expected correct rpc lock wait count", - 3000, getLongCounter("RpcLockWaitTimeNumOps", rpcMetrics)); - assertEquals("Expected correct rpc response count", - 3000, getLongCounter("RpcResponseTimeNumOps", rpcMetrics)); - assertEquals("Expected zero rpc lock wait time", - 0, getDoubleGauge("RpcLockWaitTimeAvgTime", rpcMetrics), 0.001); + assertEquals(3000, getLongCounter("RpcEnQueueTimeNumOps", rpcMetrics), + "Expected correct rpc en queue count"); + assertEquals(3000, getLongCounter("RpcQueueTimeNumOps", rpcMetrics), + "Expected correct rpc queue count"); + assertEquals(3000, getLongCounter("RpcProcessingTimeNumOps", rpcMetrics), + "Expected correct rpc processing count"); + assertEquals(3000, getLongCounter("RpcLockWaitTimeNumOps", rpcMetrics), + "Expected correct rpc lock wait count"); + assertEquals(3000, getLongCounter("RpcResponseTimeNumOps", rpcMetrics), + "Expected correct rpc response count"); + assertEquals(0, getDoubleGauge("RpcLockWaitTimeAvgTime", rpcMetrics), 0.001, + "Expected zero rpc lock wait time"); MetricsAsserts.assertQuantileGauges("RpcEnQueueTime" + interval + "s", rpcMetrics); MetricsAsserts.assertQuantileGauges("RpcQueueTime" + interval + "s", @@ -1484,7 +1492,8 @@ public void testOverallRpcProcessingTimeMetric() throws Exception { /** * Test RPC backoff by queue full. */ - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testClientBackOff() throws Exception { Server server; final TestRpcService proxy; @@ -1540,13 +1549,14 @@ public Void call() throws ServiceException, InterruptedException { if (lastException != null) { LOG.error("Last received non-RetriableException:", lastException); } - assertTrue("RetriableException not received", succeeded); + assertTrue(succeeded, "RetriableException not received"); } /** * Test RPC backoff by response time of each priority level. */ - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testClientBackOffByResponseTime() throws Exception { final TestRpcService proxy; boolean succeeded = false; @@ -1604,11 +1614,12 @@ public Void call() throws ServiceException, InterruptedException { if (lastException != null) { LOG.error("Last received non-RetriableException:", lastException); } - assertTrue("RetriableException not received", succeeded); + assertTrue(succeeded, "RetriableException not received"); } /** Test that the metrics for DecayRpcScheduler are updated. 
*/ - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testDecayRpcSchedulerMetrics() throws Exception { final String ns = CommonConfigurationKeys.IPC_NAMESPACE + ".0"; Server server = setupDecayRpcSchedulerandTestServer(ns + "."); @@ -1664,7 +1675,8 @@ public void testDecayRpcSchedulerMetrics() throws Exception { } } - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testProtocolUserPriority() throws Exception { final String ns = CommonConfigurationKeys.IPC_NAMESPACE + ".0"; conf.set(CLIENT_PRINCIPAL_KEY, "clientForProtocol"); @@ -1674,15 +1686,15 @@ public void testProtocolUserPriority() throws Exception { UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user"); // normal users start with priority 0. - Assert.assertEquals(0, server.getPriorityLevel(ugi)); + assertEquals(0, server.getPriorityLevel(ugi)); // calls for a protocol defined client will have priority of 0. - Assert.assertEquals(0, server.getPriorityLevel(newSchedulable(ugi))); + assertEquals(0, server.getPriorityLevel(newSchedulable(ugi))); // protocol defined client will have top priority of -1. ugi = UserGroupInformation.createRemoteUser("clientForProtocol"); - Assert.assertEquals(-1, server.getPriorityLevel(ugi)); + assertEquals(-1, server.getPriorityLevel(ugi)); // calls for a protocol defined client will have priority of 0. - Assert.assertEquals(0, server.getPriorityLevel(newSchedulable(ugi))); + assertEquals(0, server.getPriorityLevel(newSchedulable(ugi))); } finally { stop(server, null); } @@ -1732,7 +1744,8 @@ private Server setupDecayRpcSchedulerandTestServer(String ns) /** * Test RPC timeout. */ - @Test(timeout=30000) + @Test + @Timeout(value = 30) public void testClientRpcTimeout() throws Exception { Server server; TestRpcService proxy = null; @@ -1809,21 +1822,21 @@ public void testClientRpcTimeout() throws Exception { @Test public void testServerNameFromClass() { - Assert.assertEquals("TestRPC", + assertEquals("TestRPC", RPC.Server.serverNameFromClass(this.getClass())); - Assert.assertEquals("TestClass", + assertEquals("TestClass", RPC.Server.serverNameFromClass(TestRPC.TestClass.class)); Object testing = new TestClass().classFactory(); - Assert.assertEquals("Embedded", + assertEquals("Embedded", RPC.Server.serverNameFromClass(testing.getClass())); testing = new TestClass().classFactoryAbstract(); - Assert.assertEquals("TestClass", + assertEquals("TestClass", RPC.Server.serverNameFromClass(testing.getClass())); testing = new TestClass().classFactoryObject(); - Assert.assertEquals("TestClass", + assertEquals("TestClass", RPC.Server.serverNameFromClass(testing.getClass())); } @@ -1875,7 +1888,8 @@ public boolean equals(Object t) { } } - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testReaderExceptions() throws Exception { Server server = null; TestRpcService proxy = null; @@ -1928,33 +1942,33 @@ public RpcStatusProto getRpcStatusProto() { fail(reqName + " didn't fail"); } catch (ServiceException e) { RemoteException re = (RemoteException)e.getCause(); - assertEquals(reqName, expectedIOE, re.unwrapRemoteException()); + assertEquals(expectedIOE, re.unwrapRemoteException(), reqName); } // check authorizations to ensure new connection when expected, // then conclusively determine if connections are disconnected // correctly. - assertEquals(reqName, expectedAuths, authMetric.value()); + assertEquals(expectedAuths, authMetric.value(), reqName); if (!doDisconnect) { // if it wasn't fatal, verify there's only one open connection. 
Connection[] conns = server.getConnections(); - assertEquals(reqName, 1, conns.length); + assertEquals(1, conns.length, reqName); String connectionInfo = conns[0].toString(); LOG.info("Connection is from: {}", connectionInfo); - assertEquals( + assertEquals(1, connectionInfo.split(" / ").length, "Connection string representation should include only IP address for healthy " - + "connection", 1, connectionInfo.split(" / ").length); + + "connection"); // verify whether the connection should have been reused. if (isDisconnected) { - assertNotSame(reqName, lastConn, conns[0]); + assertNotSame(lastConn, conns[0], reqName); } else { - assertSame(reqName, lastConn, conns[0]); + assertSame(lastConn, conns[0], reqName); } lastConn = conns[0]; } else if (lastConn != null) { // avoid race condition in server where connection may not be // fully removed yet. just make sure it's marked for being closed. // the open connection checks above ensure correct behavior. - assertTrue(reqName, lastConn.shouldClose()); + assertTrue(lastConn.shouldClose(), reqName); } isDisconnected = doDisconnect; } @@ -2010,8 +2024,8 @@ public void testRpcMetricsInNanos() throws Exception { } MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name()); - assertEquals("Expected zero rpc lock wait time", - 0, getDoubleGauge("RpcLockWaitTimeAvgTime", rpcMetrics), 0.001); + assertEquals(0, getDoubleGauge("RpcLockWaitTimeAvgTime", rpcMetrics), + 0.001, "Expected zero rpc lock wait time"); MetricsAsserts.assertQuantileGauges("RpcEnQueueTime" + interval + "s", rpcMetrics); MetricsAsserts.assertQuantileGauges("RpcQueueTime" + interval + "s", diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCallBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCallBenchmark.java index 6d83d7d368cbd..b87b8db2b762f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCallBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCallBenchmark.java @@ -17,15 +17,17 @@ */ package org.apache.hadoop.ipc; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.util.ToolRunner; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestRPCCallBenchmark { - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testBenchmarkWithProto() throws Exception { int rc = ToolRunner.run(new RPCCallBenchmark(), new String[] { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index 22fdcbbe14e65..bf7cb7c3daa2f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -19,9 +19,9 @@ package org.apache.hadoop.ipc; import org.apache.hadoop.conf.Configuration; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,8 +29,8 @@ import java.lang.reflect.Method; import java.net.InetSocketAddress; -import static org.junit.Assert.assertEquals; -import static 
org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; /** Unit test for supporting method-name based compatible RPCs. */ public class TestRPCCompatibility { @@ -109,7 +109,7 @@ public long getProtocolVersion(String protocol, } - @Before + @BeforeEach public void setUp() { ProtocolSignature.resetCache(); @@ -129,7 +129,7 @@ public void setUp() { TestProtocol4.class, ProtobufRpcEngine2.class); } - @After + @AfterEach public void tearDown() { if (proxy != null) { RPC.stopProxy(proxy.getProxy()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java index 39705b06c67c0..ef7d85c2c7955 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java @@ -20,8 +20,9 @@ import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,9 +35,9 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** Split from TestRPC. */ @SuppressWarnings("deprecation") @@ -45,7 +46,7 @@ public class TestRPCServerShutdown extends TestRpcBase { public static final Logger LOG = LoggerFactory.getLogger(TestRPCServerShutdown.class); - @Before + @BeforeEach public void setup() { setupConf(); } @@ -53,7 +54,8 @@ public void setup() { /** * Verify the RPC server can shutdown properly when callQueue is full. 
*/ - @Test (timeout=30000) + @Test + @Timeout(value = 30) public void testRPCServerShutdown() throws Exception { final int numClients = 3; final List> res = new ArrayList>(); @@ -87,15 +89,15 @@ public Void call() throws ServiceException, InterruptedException { } finally { try { stop(server, proxy); - assertEquals("Not enough clients", numClients, res.size()); + assertEquals(numClients, res.size(), "Not enough clients"); for (Future f : res) { try { f.get(); fail("Future get should not return"); } catch (ExecutionException e) { ServiceException se = (ServiceException) e.getCause(); - assertTrue("Unexpected exception: " + se, - se.getCause() instanceof IOException); + assertTrue(se.getCause() instanceof IOException, + "Unexpected exception: " + se); LOG.info("Expected exception", e.getCause()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java index 90973d2674c01..30e5ed54953d2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java @@ -18,9 +18,9 @@ package org.apache.hadoop.ipc; import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +31,8 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * tests that the proxy can be interrupted @@ -41,7 +43,7 @@ public class TestRPCWaitForProxy extends TestRpcBase { private static final Configuration conf = new Configuration(); - @Before + @BeforeEach public void setupProtocolEngine() { RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class); @@ -53,14 +55,15 @@ public void setupProtocolEngine() { * * @throws Throwable any exception other than that which was expected */ - @Test(timeout = 50000) + @Test + @Timeout(value = 50) public void testWaitForProxy() throws Throwable { RpcThread worker = new RpcThread(0); worker.start(); worker.join(); Throwable caught = worker.getCaught(); Throwable cause = caught.getCause(); - Assert.assertNotNull("No exception was raised", cause); + assertNotNull(cause, "No exception was raised"); if (!(cause instanceof ConnectException)) { throw caught; } @@ -72,16 +75,17 @@ public void testWaitForProxy() throws Throwable { * * @throws Throwable any exception other than that which was expected */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testInterruptedWaitForProxy() throws Throwable { RpcThread worker = new RpcThread(100); worker.start(); Thread.sleep(1000); - Assert.assertTrue("worker hasn't started", worker.waitStarted); + assertTrue(worker.waitStarted, "worker hasn't started"); worker.interrupt(); worker.join(); Throwable caught = worker.getCaught(); - Assert.assertNotNull("No exception was raised", caught); + assertNotNull(caught, "No exception was raised"); // looking for the root cause here, which can be wrapped // as part 
of the NetUtils work. Having this test look // a the type of exception there would be brittle to improvements diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestResponseBuffer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestResponseBuffer.java index 98743be94a424..f927e56979bac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestResponseBuffer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestResponseBuffer.java @@ -18,12 +18,12 @@ package org.apache.hadoop.ipc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import org.apache.hadoop.ipc.ResponseBuffer; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Unit tests for ResponseBuffer. */ public class TestResponseBuffer { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCache.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCache.java index b789ada5271ff..e01011cd5975a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCache.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCache.java @@ -29,9 +29,10 @@ import org.apache.hadoop.ipc.RPC.RpcKind; import org.apache.hadoop.ipc.RetryCache.CacheEntryWithPayload; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Tests for {@link RetryCache} @@ -42,7 +43,7 @@ public class TestRetryCache { private static final Random r = new Random(); private static final TestServer testServer = new TestServer(); - @Before + @BeforeEach public void setup() { testServer.resetCounters(); } @@ -177,7 +178,7 @@ public void testOperations(final int input, final int numberOfThreads, for (int i = 0; i < numberOfThreads; i++) { Callable worker = () -> { Server.getCurCall().set(call); - Assert.assertEquals(Server.getCurCall().get(), call); + assertEquals(Server.getCurCall().get(), call); int randomPause = pause == 0 ? pause : r.nextInt(pause); return testServer.echo(input, failureOutput, randomPause, success); }; @@ -185,12 +186,12 @@ public void testOperations(final int input, final int numberOfThreads, list.add(submit); } - Assert.assertEquals(numberOfThreads, list.size()); + assertEquals(numberOfThreads, list.size()); for (Future future : list) { if (success) { - Assert.assertEquals(input, future.get().intValue()); + assertEquals(input, future.get().intValue()); } else { - Assert.assertEquals(failureOutput, future.get().intValue()); + assertEquals(failureOutput, future.get().intValue()); } } @@ -198,15 +199,15 @@ public void testOperations(final int input, final int numberOfThreads, // If the operation was successful, all the subsequent operations // by other threads should be retries. Operation count should be 1. int retries = numberOfThreads + (attemptedBefore ? 
0 : -1); - Assert.assertEquals(1, testServer.operationCount.get()); - Assert.assertEquals(retries, testServer.retryCount.get()); + assertEquals(1, testServer.operationCount.get()); + assertEquals(retries, testServer.retryCount.get()); } else { // If the operation failed, all the subsequent operations // should execute once more, hence the retry count should be 0 and // operation count should be the number of tries int opCount = numberOfThreads + (attemptedBefore ? 1 : 0); - Assert.assertEquals(opCount, testServer.operationCount.get()); - Assert.assertEquals(0, testServer.retryCount.get()); + assertEquals(opCount, testServer.operationCount.get()); + assertEquals(0, testServer.retryCount.get()); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCacheMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCacheMetrics.java index b95286ccb519d..072e660490106 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCacheMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRetryCacheMetrics.java @@ -19,11 +19,12 @@ import org.apache.hadoop.ipc.metrics.RetryCacheMetrics; import org.apache.hadoop.metrics2.MetricsRecordBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * Tests for {@link RetryCacheMetrics} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java index 65558a7980a2d..9b16ee8ee6dde 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.ipc; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -26,15 +26,16 @@ import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.TestConnectionRetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * This class mainly tests behaviors of reusing RPC connections for various * retry policies. 
*/ public class TestReuseRpcConnections extends TestRpcBase { - @Before + @BeforeEach public void setup() { setupConf(); } @@ -60,7 +61,8 @@ private static RetryPolicy getDefaultRetryPolicy( remoteExceptionToRetry); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testDefaultRetryPolicyReuseConnections() throws Exception { RetryPolicy rp1 = null; RetryPolicy rp2 = null; @@ -103,7 +105,8 @@ public void testDefaultRetryPolicyReuseConnections() throws Exception { verifyRetryPolicyReuseConnections(rp1, rp2, RetryPolicies.RETRY_FOREVER); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testRetryPolicyTryOnceThenFail() throws Exception { final RetryPolicy rp1 = TestConnectionRetryPolicy.newTryOnceThenFail(); final RetryPolicy rp2 = TestConnectionRetryPolicy.newTryOnceThenFail(); @@ -130,21 +133,21 @@ private void verifyRetryPolicyReuseConnections( proxy1.ping(null, newEmptyRequest()); client = ProtobufRpcEngine2.getClient(newConf); final Set conns = client.getConnectionIds(); - assertEquals("number of connections in cache is wrong", 1, conns.size()); + assertEquals(1, conns.size(), "number of connections in cache is wrong"); /* * another equivalent retry policy, reuse connection */ proxy2 = getClient(addr, newConf, retryPolicy2); proxy2.ping(null, newEmptyRequest()); - assertEquals("number of connections in cache is wrong", 1, conns.size()); + assertEquals(1, conns.size(), "number of connections in cache is wrong"); /* * different retry policy, create a new connection */ proxy3 = getClient(addr, newConf, anotherRetryPolicy); proxy3.ping(null, newEmptyRequest()); - assertEquals("number of connections in cache is wrong", 2, conns.size()); + assertEquals(2, conns.size(), "number of connections in cache is wrong"); } finally { server.stop(); // this is dirty, but clear out connection cache for next run diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java index 5b5c8bbaa9b73..541beb38dd8fd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java @@ -35,7 +35,6 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.util.Time; -import org.junit.Assert; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.RetryPolicy; @@ -61,6 +60,9 @@ import java.util.List; import java.util.concurrent.CountDownLatch; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + /** Test facilities for unit tests for RPC. 
*/ public class TestRpcBase { @@ -374,8 +376,8 @@ public TestProtos.EmptyResponseProto ping(RpcController unused, TestProtos.EmptyRequestProto request) throws ServiceException { // Ensure clientId is received byte[] clientId = Server.getClientId(); - Assert.assertNotNull(clientId); - Assert.assertEquals(ClientId.BYTE_LENGTH, clientId.length); + assertNotNull(clientId); + assertEquals(ClientId.BYTE_LENGTH, clientId.length); return TestProtos.EmptyResponseProto.newBuilder().build(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java index 2e0b3daa220a2..f30a6165edc16 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java @@ -34,11 +34,15 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.net.NetUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + public class TestRpcServerHandoff { public static final Logger LOG = @@ -97,7 +101,8 @@ void sendError() { } } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDeferredResponse() throws IOException, InterruptedException, ExecutionException { @@ -120,7 +125,7 @@ public void testDeferredResponse() throws IOException, InterruptedException, server.sendResponse(); BytesWritable response = (BytesWritable) future.get(); - Assert.assertEquals(new BytesWritable(requestBytes), response); + assertEquals(new BytesWritable(requestBytes), response); } finally { if (server != null) { server.stop(); @@ -128,7 +133,8 @@ public void testDeferredResponse() throws IOException, InterruptedException, } } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDeferredException() throws IOException, InterruptedException, ExecutionException { ServerForHandoffTest server = new ServerForHandoffTest(2); @@ -149,12 +155,12 @@ public void testDeferredException() throws IOException, InterruptedException, server.sendError(); try { future.get(); - Assert.fail("Call succeeded. Was expecting an exception"); + fail("Call succeeded. Was expecting an exception"); } catch (ExecutionException e) { Throwable cause = e.getCause(); - Assert.assertTrue(cause instanceof RemoteException); + assertTrue(cause instanceof RemoteException); RemoteException re = (RemoteException) cause; - Assert.assertTrue(re.toString().contains("DeferredError")); + assertTrue(re.toString().contains("DeferredError")); } } finally { if (server != null) { @@ -170,7 +176,7 @@ private void awaitResponseTimeout(FutureTask future) throws while (sleepTime > 0) { try { future.get(200L, TimeUnit.MILLISECONDS); - Assert.fail("Expected to timeout since" + + fail("Expected to timeout since" + " the deferred response hasn't been registered"); } catch (TimeoutException e) { // Ignoring. Expected to time out. 
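The TestRpcServerHandoff changes above follow the two mechanical conversions used throughout this patch: @Test(timeout = 10000), expressed in milliseconds, becomes @Test plus @Timeout(value = 10), expressed in seconds, and the optional assertion message moves from the first argument to the last. A minimal sketch of both conversions, using a hypothetical ExampleTimeoutTest class that is not part of this patch:

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

/** Hypothetical illustration only; not part of this patch. */
public class ExampleTimeoutTest {

  // JUnit 4: @Test(timeout = 10000)   -- value in milliseconds
  // JUnit 5: @Timeout defaults to seconds, so 10000 ms becomes value = 10
  @Test
  @Timeout(value = 10, unit = TimeUnit.SECONDS)
  public void testWithTimeout() {
    // JUnit 4: assertEquals("message", expected, actual)
    // JUnit 5: assertEquals(expected, actual, "message")
    assertEquals(4, 2 + 2, "arithmetic should hold");
  }
}

Because @Timeout defaults to seconds, the JUnit 4 millisecond values in these hunks are divided by 1000 (10000 becomes 10, 300000 becomes 300, and so on).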
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcWritable.java index 6beae7d12b4c7..69217c674beaa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcWritable.java @@ -26,11 +26,13 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto; import org.apache.hadoop.util.Time; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.protobuf.Message; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestRpcWritable {//extends TestRpcBase { static Writable writable = new LongWritable(Time.now()); @@ -49,8 +51,8 @@ public void testWritableWrapper() throws IOException { // deserial LongWritable actual = RpcWritable.wrap(new LongWritable()) .readFrom(bb); - Assert.assertEquals(writable, actual); - Assert.assertEquals(0, bb.remaining()); + assertEquals(writable, actual); + assertEquals(0, bb.remaining()); } @Test @@ -61,8 +63,8 @@ public void testProtobufWrapper() throws IOException { Message actual = RpcWritable.wrap(EchoRequestProto.getDefaultInstance()) .readFrom(bb); - Assert.assertEquals(message1, actual); - Assert.assertEquals(0, bb.remaining()); + assertEquals(message1, actual); + assertEquals(0, bb.remaining()); } @Test @@ -75,23 +77,23 @@ public void testBufferWrapper() throws IOException { ByteBuffer bb = ByteBuffer.wrap(baos.toByteArray()); RpcWritable.Buffer buf = RpcWritable.Buffer.wrap(bb); - Assert.assertEquals(baos.size(), bb.remaining()); - Assert.assertEquals(baos.size(), buf.remaining()); + assertEquals(baos.size(), bb.remaining()); + assertEquals(baos.size(), buf.remaining()); Object actual = buf.getValue(EchoRequestProto.getDefaultInstance()); - Assert.assertEquals(message1, actual); - Assert.assertTrue(bb.remaining() > 0); - Assert.assertEquals(bb.remaining(), buf.remaining()); + assertEquals(message1, actual); + assertTrue(bb.remaining() > 0); + assertEquals(bb.remaining(), buf.remaining()); actual = buf.getValue(EchoRequestProto.getDefaultInstance()); - Assert.assertEquals(message2, actual); - Assert.assertTrue(bb.remaining() > 0); - Assert.assertEquals(bb.remaining(), buf.remaining()); + assertEquals(message2, actual); + assertTrue(bb.remaining() > 0); + assertEquals(bb.remaining(), buf.remaining()); actual = buf.newInstance(LongWritable.class, null); - Assert.assertEquals(writable, actual); - Assert.assertEquals(0, bb.remaining()); - Assert.assertEquals(0, buf.remaining()); + assertEquals(writable, actual); + assertEquals(0, bb.remaining()); + assertEquals(0, buf.remaining()); } @Test @@ -103,27 +105,27 @@ public void testBufferWrapperNested() throws IOException { message2.writeDelimitedTo(dos); ByteBuffer bb = ByteBuffer.wrap(baos.toByteArray()); RpcWritable.Buffer buf1 = RpcWritable.Buffer.wrap(bb); - Assert.assertEquals(baos.size(), bb.remaining()); - Assert.assertEquals(baos.size(), buf1.remaining()); + assertEquals(baos.size(), bb.remaining()); + assertEquals(baos.size(), buf1.remaining()); Object actual = buf1.newInstance(LongWritable.class, null); - Assert.assertEquals(writable, actual); + assertEquals(writable, actual); int left = bb.remaining(); - Assert.assertTrue(left > 0); - 
Assert.assertEquals(left, buf1.remaining()); + assertTrue(left > 0); + assertEquals(left, buf1.remaining()); // original bb now appears empty, but rpc writable has a slice of the bb. RpcWritable.Buffer buf2 = buf1.newInstance(RpcWritable.Buffer.class, null); - Assert.assertEquals(0, bb.remaining()); - Assert.assertEquals(0, buf1.remaining()); - Assert.assertEquals(left, buf2.remaining()); + assertEquals(0, bb.remaining()); + assertEquals(0, buf1.remaining()); + assertEquals(left, buf2.remaining()); actual = buf2.getValue(EchoRequestProto.getDefaultInstance()); - Assert.assertEquals(message1, actual); - Assert.assertTrue(buf2.remaining() > 0); + assertEquals(message1, actual); + assertTrue(buf2.remaining() > 0); actual = buf2.getValue(EchoRequestProto.getDefaultInstance()); - Assert.assertEquals(message2, actual); - Assert.assertEquals(0, buf2.remaining()); + assertEquals(message2, actual); + assertEquals(0, buf2.remaining()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 23eb69984d6cc..68fa10c6c1466 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -34,13 +34,10 @@ import org.apache.hadoop.security.token.*; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; @@ -81,18 +78,17 @@ import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.KERBEROS; import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.SIMPLE; import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.TOKEN; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** Unit tests for using Sasl over RPC. 
*/ -@RunWith(Parameterized.class) public class TestSaslRPC extends TestRpcBase { - @Parameters + public static Collection data() { Collection params = new ArrayList<>(); for (QualityOfProtection qop : QualityOfProtection.values()) { @@ -112,13 +108,14 @@ public static Collection data() { QualityOfProtection[] qop; QualityOfProtection expectedQop; String saslPropertiesResolver ; - - public TestSaslRPC(QualityOfProtection[] qop, - QualityOfProtection expectedQop, - String saslPropertiesResolver) { - this.qop=qop; - this.expectedQop = expectedQop; - this.saslPropertiesResolver = saslPropertiesResolver; + + public void initTestSaslRPC(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, + String pSaslPropertiesResolver) { + this.qop = pQop; + this.expectedQop = pExpectedQop; + this.saslPropertiesResolver = pSaslPropertiesResolver; + setup(); } public static final Logger LOG = LoggerFactory.getLogger(TestSaslRPC.class); @@ -142,14 +139,13 @@ enum UseToken { OTHER() } - @BeforeClass + @BeforeAll public static void setupKerb() { System.setProperty("java.security.krb5.kdc", ""); System.setProperty("java.security.krb5.realm", "NONE"); Security.addProvider(new SaslPlainServer.SecurityProvider()); } - @Before public void setup() { LOG.info("---------------------------------"); LOG.info("Testing QOP:"+ getQOPNames(qop)); @@ -239,16 +235,23 @@ public Class annotationType() { } } - @Test - public void testDigestRpc() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testDigestRpc(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) + throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); TestTokenSecretManager sm = new TestTokenSecretManager(); final Server server = setupTestServer(conf, 5, sm); doDigestRpc(server, sm); } - @Test - public void testDigestRpcWithoutAnnotation() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testDigestRpcWithoutAnnotation(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); TestTokenSecretManager sm = new TestTokenSecretManager(); try { SecurityUtil.setSecurityInfoProviders(new CustomSecurityInfo()); @@ -259,8 +262,11 @@ public void testDigestRpcWithoutAnnotation() throws Exception { } } - @Test - public void testErrorMessage() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testErrorMessage(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); BadTokenSecretManager sm = new BadTokenSecretManager(); final Server server = setupTestServer(conf, 5, sm); @@ -301,8 +307,8 @@ private void doDigestRpc(Server server, TestTokenSecretManager sm) for (Connection connection : server.getConnections()) { // only qop auth should dispose of the sasl server boolean hasServer = (connection.saslServer != null); - assertTrue("qop:" + expectedQop + " hasServer:" + hasServer, - (expectedQop == QualityOfProtection.AUTHENTICATION) ^ hasServer); + assertTrue((expectedQop == QualityOfProtection.AUTHENTICATION) ^ hasServer, + "qop:" + expectedQop + " hasServer:" + hasServer); n++; } assertTrue(n > 0); @@ -312,8 +318,11 @@ private void doDigestRpc(Server server, TestTokenSecretManager sm) } } - @Test - public void testPingInterval() throws Exception { + @ParameterizedTest + 
@MethodSource("data") + public void testPingInterval(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); Configuration newConf = new Configuration(conf); newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_1); conf.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY, @@ -331,9 +340,12 @@ public void testPingInterval() throws Exception { TestRpcService.class, null, 0, null, newConf); assertEquals(0, remoteId.getPingInterval()); } - - @Test - public void testPerConnectionConf() throws Exception { + + @ParameterizedTest + @MethodSource("data") + public void testPerConnectionConf(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); TestTokenSecretManager sm = new TestTokenSecretManager(); final Server server = setupTestServer(conf, 5, sm); final UserGroupInformation current = UserGroupInformation.getCurrentUser(); @@ -359,16 +371,16 @@ public void testPerConnectionConf() throws Exception { proxy1.getAuthMethod(null, newEmptyRequest()); client = ProtobufRpcEngine2.getClient(newConf); Set conns = client.getConnectionIds(); - assertEquals("number of connections in cache is wrong", 1, conns.size()); + assertEquals(1, conns.size(), "number of connections in cache is wrong"); // same conf, connection should be re-used proxy2 = getClient(addr, newConf); proxy2.getAuthMethod(null, newEmptyRequest()); - assertEquals("number of connections in cache is wrong", 1, conns.size()); + assertEquals(1, conns.size(), "number of connections in cache is wrong"); // different conf, new connection should be set up newConf.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, timeouts[1]); proxy3 = getClient(addr, newConf); proxy3.getAuthMethod(null, newEmptyRequest()); - assertEquals("number of connections in cache is wrong", 2, conns.size()); + assertEquals(2, conns.size(), "number of connections in cache is wrong"); // now verify the proxies have the correct connection ids and timeouts ConnectionId[] connsArray = { RPC.getConnectionIdForProxy(proxy1), @@ -412,15 +424,21 @@ static void testKerberosRpc(String principal, String keytab) throws Exception { System.out.println("Test is successful."); } - @Test - public void testSaslPlainServer() throws IOException { + @ParameterizedTest + @MethodSource("data") + public void testSaslPlainServer(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws IOException { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); runNegotiation( new TestPlainCallbacks.Client("user", "pass"), new TestPlainCallbacks.Server("user", "pass")); } - @Test - public void testSaslPlainServerBadPassword() { + @ParameterizedTest + @MethodSource("data") + public void testSaslPlainServerBadPassword(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); SaslException e = null; try { runNegotiation( @@ -436,9 +454,9 @@ public void testSaslPlainServerBadPassword() { } private void assertContains(String expected, String text) { - assertNotNull("null text", text ); - assertTrue("No {" + expected + "} in {" + text + "}", - text.contains(expected)); + assertNotNull(text, "null text"); + assertTrue(text.contains(expected), + "No {" + expected + "} in {" + text + "}"); } 
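The remaining TestSaslRPC methods replace the JUnit 4 Parameterized runner with @ParameterizedTest and @MethodSource("data"), passing the former constructor arguments into each test method and applying them through initTestSaslRPC before the test body runs. A minimal sketch of that shape, using simplified, hypothetical names (ExampleParameterizedTest, initFields) rather than the ones in this patch:

import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

/** Hypothetical illustration only; not part of this patch. */
public class ExampleParameterizedTest {

  private int threshold;

  // Replaces the JUnit 4 @Parameters factory; each Object[] is one invocation.
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {{1}, {2}, {3}});
  }

  // Replaces the JUnit 4 constructor: fields are set explicitly per invocation.
  public void initFields(int pThreshold) {
    this.threshold = pThreshold;
  }

  @ParameterizedTest
  @MethodSource("data")
  public void testAboveZero(int pThreshold) {
    initFields(pThreshold);
    assertTrue(threshold > 0, "threshold should be positive");
  }
}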
private void runNegotiation(CallbackHandler clientCbh, @@ -452,7 +470,7 @@ private void runNegotiation(CallbackHandler clientCbh, SaslServer saslServer = Sasl.createSaslServer( mechanism, null, "localhost", null, serverCbh); - assertNotNull("failed to find PLAIN server", saslServer); + assertNotNull(saslServer, "failed to find PLAIN server"); byte[] response = saslClient.evaluateChallenge(new byte[0]); assertNotNull(response); @@ -561,8 +579,11 @@ private static Pattern No(AuthMethod ... method) { /* * simple server */ - @Test - public void testSimpleServer() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testSimpleServer(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE, SIMPLE)); assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE, SIMPLE, UseToken.OTHER)); // SASL methods are normally reverted to SIMPLE @@ -581,8 +602,11 @@ public void testSimpleServer() throws Exception { * This test mimics this behaviour, and asserts the fallback whether it is set correctly. * @see HADOOP-17975 */ - @Test - public void testClientFallbackToSimpleAuthForASecondClient() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testClientFallbackToSimpleAuthForASecondClient(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); Configuration serverConf = createConfForAuth(SIMPLE); Server server = startServer(serverConf, setupServerUgi(SIMPLE, serverConf), @@ -632,13 +656,16 @@ public void testClientFallbackToSimpleAuthForASecondClient() throws Exception { server.stop(); } - assertTrue("First client does not set to fall back properly.", fallbackToSimpleAuth1.get()); - assertTrue("Second client does not set to fall back properly.", fallbackToSimpleAuth2.get()); + assertTrue(fallbackToSimpleAuth1.get(), "First client does not set to fall back properly."); + assertTrue(fallbackToSimpleAuth2.get(), "Second client does not set to fall back properly."); } - @Test - public void testNoClientFallbackToSimple() + @ParameterizedTest + @MethodSource("data") + public void testNoClientFallbackToSimple(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); clientFallBackToSimpleAllowed = false; // tokens are irrelevant w/o secret manager enabled assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE, SIMPLE)); @@ -679,8 +706,11 @@ public void testNoClientFallbackToSimple() assertAuthEquals(BadToken, getAuthMethod(KERBEROS, TOKEN, UseToken.INVALID)); } - @Test - public void testSimpleServerWithTokens() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testSimpleServerWithTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); // Client not using tokens assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE, SIMPLE)); // SASL methods are reverted to SIMPLE @@ -708,8 +738,11 @@ public void testSimpleServerWithTokens() throws Exception { assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, UseToken.OTHER)); } - @Test - public void testSimpleServerWithInvalidTokens() throws Exception { + @ParameterizedTest + 
@MethodSource("data") + public void testSimpleServerWithInvalidTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); // Tokens are ignored because client is reverted to simple assertAuthEquals(SIMPLE, getAuthMethod(SIMPLE, SIMPLE, UseToken.INVALID)); assertAuthEquals(SIMPLE, getAuthMethod(KERBEROS, SIMPLE, UseToken.INVALID)); @@ -724,8 +757,11 @@ public void testSimpleServerWithInvalidTokens() throws Exception { /* * token server */ - @Test - public void testTokenOnlyServer() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testTokenOnlyServer(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); // simple client w/o tokens won't try SASL, so server denies assertAuthEquals(Denied(SIMPLE), getAuthMethod(SIMPLE, TOKEN)); assertAuthEquals(No(TOKEN), getAuthMethod(SIMPLE, TOKEN, UseToken.OTHER)); @@ -733,8 +769,11 @@ public void testTokenOnlyServer() throws Exception { assertAuthEquals(No(TOKEN), getAuthMethod(KERBEROS, TOKEN, UseToken.OTHER)); } - @Test - public void testTokenOnlyServerWithTokens() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testTokenOnlyServerWithTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); assertAuthEquals(TOKEN, getAuthMethod(SIMPLE, TOKEN, UseToken.VALID)); assertAuthEquals(TOKEN, getAuthMethod(KERBEROS, TOKEN, UseToken.VALID)); enableSecretManager = false; @@ -742,8 +781,11 @@ public void testTokenOnlyServerWithTokens() throws Exception { assertAuthEquals(NoTokenAuth, getAuthMethod(KERBEROS, TOKEN, UseToken.VALID)); } - @Test - public void testTokenOnlyServerWithInvalidTokens() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testTokenOnlyServerWithInvalidTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); assertAuthEquals(BadToken, getAuthMethod(SIMPLE, TOKEN, UseToken.INVALID)); assertAuthEquals(BadToken, getAuthMethod(KERBEROS, TOKEN, UseToken.INVALID)); enableSecretManager = false; @@ -754,8 +796,11 @@ public void testTokenOnlyServerWithInvalidTokens() throws Exception { /* * kerberos server */ - @Test - public void testKerberosServer() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testKerberosServer(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); // doesn't try SASL assertAuthEquals(Denied(SIMPLE), getAuthMethod(SIMPLE, KERBEROS)); // does try SASL @@ -765,8 +810,11 @@ public void testKerberosServer() throws Exception { assertAuthEquals(KrbFailed, getAuthMethod(KERBEROS, KERBEROS, UseToken.OTHER)); } - @Test - public void testKerberosServerWithTokens() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testKerberosServerWithTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); // can use tokens regardless of auth assertAuthEquals(TOKEN, 
getAuthMethod(SIMPLE, KERBEROS, UseToken.VALID)); assertAuthEquals(TOKEN, getAuthMethod(KERBEROS, KERBEROS, UseToken.VALID)); @@ -776,8 +824,11 @@ public void testKerberosServerWithTokens() throws Exception { assertAuthEquals(KrbFailed, getAuthMethod(KERBEROS, KERBEROS, UseToken.VALID)); } - @Test - public void testKerberosServerWithInvalidTokens() throws Exception { + @ParameterizedTest + @MethodSource("data") + public void testKerberosServerWithInvalidTokens(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); assertAuthEquals(BadToken, getAuthMethod(SIMPLE, KERBEROS, UseToken.INVALID)); assertAuthEquals(BadToken, getAuthMethod(KERBEROS, KERBEROS, UseToken.INVALID)); enableSecretManager = false; @@ -788,8 +839,12 @@ public void testKerberosServerWithInvalidTokens() throws Exception { // ensure that for all qop settings, client can handle postponed rpc // responses. basically ensures that the rpc server isn't encrypting // and queueing the responses out of order. - @Test(timeout=10000) - public void testSaslResponseOrdering() throws Exception { + @ParameterizedTest + @MethodSource("data") + @Timeout(value = 10) + public void testSaslResponseOrdering(QualityOfProtection[] pQop, + QualityOfProtection pExpectedQop, String pSaslPropertiesResolver) throws Exception { + initTestSaslRPC(pQop, pExpectedQop, pSaslPropertiesResolver); SecurityUtil.setAuthenticationMethod( AuthenticationMethod.TOKEN, conf); UserGroupInformation.setConfiguration(conf); @@ -834,7 +889,7 @@ public Void call() throws Exception { } catch (TimeoutException te) { continue; // expected. } - Assert.fail("future"+i+" did not block"); + fail("future" + i + " did not block"); } // triggers responses to be unblocked in a random order. 
having // only 1 handler ensures that the prior calls are already diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java index 2011803a4e5a6..85a3cfc7c7cbb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java @@ -18,8 +18,14 @@ package org.apache.hadoop.ipc; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import java.io.IOException; import java.net.BindException; @@ -32,7 +38,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.Server.Call; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import static org.apache.hadoop.test.MockitoUtil.verifyZeroInteractions; @@ -120,7 +127,7 @@ public void testBindError() throws Exception { } finally { socket2.close(); } - assertTrue("Failed to catch the expected bind exception",caught); + assertTrue(caught, "Failed to catch the expected bind exception"); } finally { socket.close(); } @@ -135,7 +142,8 @@ static class TestException2 extends Exception { static class TestException3 extends Exception { } - @Test (timeout=300000) + @Test + @Timeout(value = 300) public void testLogExceptions() throws Exception { final Configuration conf = new Configuration(); final Call dummyCall = new Call(0, 0, null, null); @@ -189,7 +197,8 @@ public void testExceptionsHandlerSuppressed() { assertFalse(handler.isSuppressedLog(RpcClientException.class)); } - @Test (timeout=300000) + @Test + @Timeout(value = 300) public void testPurgeIntervalNanosConf() throws Exception { Configuration conf = new Configuration(); conf.setInt(CommonConfigurationKeysPublic. 
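Several of the files that follow (the weighted-round-robin multiplexer, cost-provider, log-throttling and metrics-system tests) rewrite @Test(expected = X.class) as an explicit assertThrows lambda. A minimal sketch of that conversion, using a hypothetical parsePositive helper that is not part of this patch:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

/** Hypothetical illustration only; not part of this patch. */
public class ExampleThrowsTest {

  private static int parsePositive(String s) {
    int v = Integer.parseInt(s);
    if (v <= 0) {
      throw new IllegalArgumentException("not positive: " + v);
    }
    return v;
  }

  // JUnit 4: @Test(expected = IllegalArgumentException.class)
  // JUnit 5: the failing call is wrapped in a lambda passed to assertThrows.
  @Test
  public void testRejectsNonPositive() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
        () -> parsePositive("-1"));
    assertEquals("not positive: -1", e.getMessage());
  }
}

Unlike the expected attribute, assertThrows scopes the expectation to a single call and returns the thrown exception, so the message can also be asserted where that adds value.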
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java index 1bad29e7750d1..51c66abb3fc26 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java @@ -38,12 +38,13 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.SocksSocketFactory; import org.apache.hadoop.net.StandardSocketFactory; -import org.junit.After; -import org.junit.Test; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; import static org.assertj.core.api.Assertions.assertThat; @@ -76,7 +77,7 @@ private void startTestServer() throws Exception { port = serverRunnable.getPort(); } - @After + @AfterEach public void stopTestServer() throws InterruptedException { final Thread t = serverThread; if (t != null) { @@ -131,7 +132,8 @@ static class DummySocketFactory extends StandardSocketFactory { /** * Test SocksSocketFactory. */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testSocksSocketFactory() throws Exception { startTestServer(); testSocketFactory(new SocksSocketFactory()); @@ -140,7 +142,8 @@ public void testSocksSocketFactory() throws Exception { /** * Test StandardSocketFactory. 
*/ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testStandardSocketFactory() throws Exception { startTestServer(); testSocketFactory(new StandardSocketFactory()); @@ -176,7 +179,8 @@ private void testSocketFactory(SocketFactory socketFactory) throws Exception { /** * test proxy methods */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testProxy() throws Exception { SocksSocketFactory templateWithoutProxy = new SocksSocketFactory(); Proxy proxy = new Proxy(Type.SOCKS, InetSocketAddress.createUnresolved( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java index 11e2a9d917a19..5617fae78bec2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java @@ -20,13 +20,14 @@ import static org.assertj.core.api.Assertions.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.hadoop.ipc.WeightedRoundRobinMultiplexer.IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; public class TestWeightedRoundRobinMultiplexer { public static final Logger LOG = @@ -34,24 +35,29 @@ public class TestWeightedRoundRobinMultiplexer { private WeightedRoundRobinMultiplexer mux; - @Test(expected=IllegalArgumentException.class) + @Test public void testInstantiateNegativeMux() { - mux = new WeightedRoundRobinMultiplexer(-1, "", new Configuration()); + assertThrows(IllegalArgumentException.class, () -> { + mux = new WeightedRoundRobinMultiplexer(-1, "", new Configuration()); + }); } - @Test(expected=IllegalArgumentException.class) + @Test public void testInstantiateZeroMux() { - mux = new WeightedRoundRobinMultiplexer(0, "", new Configuration()); + assertThrows(IllegalArgumentException.class, () -> { + mux = new WeightedRoundRobinMultiplexer(0, "", new Configuration()); + }); } - @Test(expected=IllegalArgumentException.class) + @Test public void testInstantiateIllegalMux() { - Configuration conf = new Configuration(); - conf.setStrings("namespace." + IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY, - "1", "2", "3"); - - // ask for 3 weights with 2 queues - mux = new WeightedRoundRobinMultiplexer(2, "namespace", conf); + assertThrows(IllegalArgumentException.class, ()->{ + Configuration conf = new Configuration(); + conf.setStrings("namespace." 
+ IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY, + "1", "2", "3"); + // ask for 3 weights with 2 queues + mux = new WeightedRoundRobinMultiplexer(2, "namespace", conf); + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedTimeCostProvider.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedTimeCostProvider.java index 4f4a72b99ab4a..c56a971c7d964 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedTimeCostProvider.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedTimeCostProvider.java @@ -21,13 +21,14 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProcessingDetails.Timing; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.ipc.WeightedTimeCostProvider.DEFAULT_LOCKEXCLUSIVE_WEIGHT; import static org.apache.hadoop.ipc.WeightedTimeCostProvider.DEFAULT_LOCKFREE_WEIGHT; import static org.apache.hadoop.ipc.WeightedTimeCostProvider.DEFAULT_LOCKSHARED_WEIGHT; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; /** Tests for {@link WeightedTimeCostProvider}. */ public class TestWeightedTimeCostProvider { @@ -40,7 +41,7 @@ public class TestWeightedTimeCostProvider { private WeightedTimeCostProvider costProvider; private ProcessingDetails processingDetails; - @Before + @BeforeEach public void setup() { costProvider = new WeightedTimeCostProvider(); processingDetails = new ProcessingDetails(TimeUnit.MILLISECONDS); @@ -50,9 +51,11 @@ public void setup() { processingDetails.set(Timing.LOCKEXCLUSIVE, LOCKEXCLUSIVE_TIME); } - @Test(expected = AssertionError.class) + @Test public void testGetCostBeforeInit() { - costProvider.getCost(null); + assertThrows(AssertionError.class, () -> { + costProvider.getCost(null); + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestDecayRpcSchedulerDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestDecayRpcSchedulerDetailedMetrics.java index 01d407ba26010..f83f70fdb7131 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestDecayRpcSchedulerDetailedMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestDecayRpcSchedulerDetailedMetrics.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.ipc.metrics; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.DecayRpcScheduler; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestDecayRpcSchedulerDetailedMetrics { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestRpcMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestRpcMetrics.java index 1716433411181..825e785408627 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestRpcMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/metrics/TestRpcMetrics.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.ipc.metrics; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.LongWritable; @@ -27,7 +27,7 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestRpcMetrics { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java index ba7de6f437ee5..3236c0b82b4e5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServlet.java @@ -20,9 +20,9 @@ import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.http.HttpServerFunctionalTest; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @@ -38,20 +38,22 @@ public class TestJMXJsonServlet extends HttpServerFunctionalTest { private static HttpServer2 server; private static URL baseUrl; - @BeforeClass public static void setup() throws Exception { + @BeforeAll + public static void setup() throws Exception { server = createTestServer(); server.start(); baseUrl = getServerURL(server); } - @AfterClass public static void cleanup() throws Exception { + @AfterAll + public static void cleanup() throws Exception { server.stop(); } public static void assertReFind(String re, String value) { Pattern p = Pattern.compile(re); Matcher m = p.matcher(value); - assertTrue("'"+p+"' does not match "+value, m.find()); + assertTrue(m.find(), "'"+p+"' does not match "+value); } @Test public void testQuery() throws Exception { @@ -95,8 +97,8 @@ public void testTraceRequest() throws IOException { HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("TRACE"); - assertEquals("Unexpected response code", - HttpServletResponse.SC_METHOD_NOT_ALLOWED, conn.getResponseCode()); + assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED, conn.getResponseCode(), + "Unexpected response code"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServletNaNFiltered.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServletNaNFiltered.java index 52a52be80a35c..4d0794a24992c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServletNaNFiltered.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/jmx/TestJMXJsonServletNaNFiltered.java @@ -21,9 +21,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; 
+import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.HttpServer2; @@ -35,7 +35,8 @@ public class TestJMXJsonServletNaNFiltered extends HttpServerFunctionalTest { private static HttpServer2 server; private static URL baseUrl; - @BeforeClass public static void setup() throws Exception { + @BeforeAll + public static void setup() throws Exception { Configuration configuration = new Configuration(); configuration.setBoolean(JMX_NAN_FILTER, true); server = createTestServer(configuration); @@ -43,14 +44,15 @@ public class TestJMXJsonServletNaNFiltered extends HttpServerFunctionalTest { baseUrl = getServerURL(server); } - @AfterClass public static void cleanup() throws Exception { + @AfterAll + public static void cleanup() throws Exception { server.stop(); } public static void assertReFind(String re, String value) { Pattern p = Pattern.compile(re); Matcher m = p.matcher(value); - assertTrue("'"+p+"' does not match "+value, m.find()); + assertTrue(m.find(), "'"+p+"' does not match "+value); } @Test public void testQuery() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogThrottlingHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogThrottlingHelper.java index 6c627116f8cb9..ed78351c4be89 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogThrottlingHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogThrottlingHelper.java @@ -19,12 +19,13 @@ import org.apache.hadoop.log.LogThrottlingHelper.LogAction; import org.apache.hadoop.util.FakeTimer; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Tests for {@link LogThrottlingHelper}. 
@@ -36,7 +37,7 @@ public class TestLogThrottlingHelper { private LogThrottlingHelper helper; private FakeTimer timer; - @Before + @BeforeEach public void setup() { timer = new FakeTimer(); helper = new LogThrottlingHelper(LOG_PERIOD, null, timer); @@ -93,11 +94,13 @@ public void testLoggingWithMultipleValues() { } } - @Test(expected = IllegalArgumentException.class) + @Test public void testLoggingWithInconsistentValues() { - assertTrue(helper.record(1, 2).shouldLog()); - helper.record(1, 2); - helper.record(1, 2, 3); + assertThrows(IllegalArgumentException.class, () -> { + assertTrue(helper.record(1, 2).shouldLog()); + helper.record(1, 2); + helper.record(1, 2, 3); + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java index 05724968c29a1..4b9ff239cfc44 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/filter/TestPatternFilter.java @@ -22,10 +22,12 @@ import java.util.List; import org.apache.commons.configuration2.SubsetConfiguration; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsRecord; @@ -129,8 +131,8 @@ public class TestPatternFilter { } static void shouldAccept(SubsetConfiguration conf, String s) { - assertTrue("accepts "+ s, newGlobFilter(conf).accepts(s)); - assertTrue("accepts "+ s, newRegexFilter(conf).accepts(s)); + assertTrue(newGlobFilter(conf).accepts(s), "accepts "+ s); + assertTrue(newRegexFilter(conf).accepts(s), "accepts "+ s); } // Version for one tag: @@ -159,8 +161,8 @@ private static void shouldAcceptImpl(final boolean expectAcceptList, final MetricsFilter regexFilter = newRegexFilter(conf); // Test acceptance of the tag list: - assertEquals("accepts "+ tags, expectAcceptList, globFilter.accepts(tags)); - assertEquals("accepts "+ tags, expectAcceptList, regexFilter.accepts(tags)); + assertEquals(expectAcceptList, globFilter.accepts(tags), "accepts "+ tags); + assertEquals(expectAcceptList, regexFilter.accepts(tags), "accepts "+ tags); // Test results on each of the individual tags: int acceptedCount = 0; @@ -168,7 +170,7 @@ private static void shouldAcceptImpl(final boolean expectAcceptList, MetricsTag tag = tags.get(i); boolean actGlob = globFilter.accepts(tag); boolean actRegex = regexFilter.accepts(tag); - assertEquals("accepts "+tag, expectedAcceptedSpec[i], actGlob); + assertEquals(expectedAcceptedSpec[i], actGlob, "accepts "+tag); // Both the filters should give the same result: assertEquals(actGlob, actRegex); if (actGlob) { @@ -177,10 +179,10 @@ private static void shouldAcceptImpl(final boolean expectAcceptList, } if (expectAcceptList) { // At least one individual tag should be accepted: - assertTrue("No tag of the following accepted: " + tags, acceptedCount > 0); + assertTrue(acceptedCount > 0, "No tag of the following accepted: " + tags); } else { // At least one individual tag should be rejected: - assertTrue("No tag of the following rejected: " + 
tags, acceptedCount < tags.size()); + assertTrue(acceptedCount < tags.size(), "No tag of the following rejected: " + tags); } } @@ -191,13 +193,13 @@ private static void shouldAcceptImpl(final boolean expectAcceptList, * @param record MetricsRecord to check */ static void shouldAccept(SubsetConfiguration conf, MetricsRecord record) { - assertTrue("accepts " + record, newGlobFilter(conf).accepts(record)); - assertTrue("accepts " + record, newRegexFilter(conf).accepts(record)); + assertTrue(newGlobFilter(conf).accepts(record), "accepts " + record); + assertTrue(newRegexFilter(conf).accepts(record), "accepts " + record); } static void shouldReject(SubsetConfiguration conf, String s) { - assertTrue("rejects "+ s, !newGlobFilter(conf).accepts(s)); - assertTrue("rejects "+ s, !newRegexFilter(conf).accepts(s)); + assertTrue(!newGlobFilter(conf).accepts(s), "rejects "+ s); + assertTrue(!newRegexFilter(conf).accepts(s), "rejects "+ s); } /** @@ -207,8 +209,8 @@ static void shouldReject(SubsetConfiguration conf, String s) { * @param record MetricsRecord to check */ static void shouldReject(SubsetConfiguration conf, MetricsRecord record) { - assertTrue("rejects " + record, !newGlobFilter(conf).accepts(record)); - assertTrue("rejects " + record, !newRegexFilter(conf).accepts(record)); + assertTrue(!newGlobFilter(conf).accepts(record), "rejects " + record); + assertTrue(!newRegexFilter(conf).accepts(record), "rejects " + record); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/ConfigUtil.java index 1634ea97a8eda..3f109acabb6ec 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/ConfigUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/ConfigUtil.java @@ -21,7 +21,8 @@ import java.io.PrintWriter; import java.util.Iterator; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.commons.configuration2.Configuration; import org.apache.commons.configuration2.PropertiesConfiguration; @@ -54,14 +55,14 @@ static void assertEq(Configuration expected, Configuration actual) { // Check that the actual config contains all the properties of the expected for (Iterator it = expected.getKeys(); it.hasNext();) { String key = (String) it.next(); - assertTrue("actual should contain "+ key, actual.containsKey(key)); - assertEquals("value of "+ key, expected.getProperty(key), - actual.getProperty(key)); + assertTrue(actual.containsKey(key), "actual should contain "+ key); + assertEquals(expected.getProperty(key), + actual.getProperty(key), "value of "+ key); } // Check that the actual config has no extra properties for (Iterator it = actual.getKeys(); it.hasNext();) { String key = (String) it.next(); - assertTrue("expected should contain "+ key, expected.containsKey(key)); + assertTrue(expected.containsKey(key), "expected should contain "+ key); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java index 7bc772f062a37..c1497fce05de3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java @@ -18,8 +18,8 @@ package org.apache.hadoop.metrics2.impl; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.DatagramPacket; @@ -44,7 +44,7 @@ import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30; import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31; import org.apache.hadoop.metrics2.sink.ganglia.GangliaMetricsTestHelper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -168,12 +168,11 @@ private void checkMetrics(List bytearrlist, int expectedCount) { for (int index = 0; index < foundMetrics.length; index++) { if (!foundMetrics[index]) { - assertTrue("Missing metrics: " + expectedMetrics[index], false); + assertTrue(false, "Missing metrics: " + expectedMetrics[index]); } } - assertEquals("Mismatch in record count: ", - expectedCount, bytearrlist.size()); + assertEquals(expectedCount, bytearrlist.size(), "Mismatch in record count: "); } @SuppressWarnings("unused") diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java index 89ef794463683..e387382fec4db 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsCollectorImpl.java @@ -18,8 +18,9 @@ package org.apache.hadoop.metrics2.impl; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.commons.configuration2.SubsetConfiguration; import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*; @@ -34,10 +35,10 @@ public class TestMetricsCollectorImpl { mb.setRecordFilter(newGlobFilter(fc)); MetricsRecordBuilderImpl rb = mb.addRecord("foo"); rb.tag(info("foo", ""), "value").addGauge(info("g0", ""), 1); - assertEquals("no tags", 0, rb.tags().size()); - assertEquals("no metrics", 0, rb.metrics().size()); - assertNull("null record", rb.getRecord()); - assertEquals("no records", 0, mb.getRecords().size()); + assertEquals(0, rb.tags().size(), "no tags"); + assertEquals(0, rb.metrics().size(), "no metrics"); + assertNull(rb.getRecord(), "null record"); + assertEquals(0, mb.getRecords().size(), "no records"); } @Test public void testPerMetricFiltering() { @@ -48,9 +49,9 @@ public class TestMetricsCollectorImpl { MetricsRecordBuilderImpl rb = mb.addRecord("foo"); rb.tag(info("foo", ""), "").addCounter(info("c0", ""), 0) .addGauge(info("foo", ""), 1); - assertEquals("1 tag", 1, rb.tags().size()); - assertEquals("1 metric", 1, rb.metrics().size()); - assertEquals("expect foo tag", "foo", rb.tags().get(0).name()); - assertEquals("expect c0", "c0", rb.metrics().get(0).name()); + assertEquals(1, rb.tags().size(), "1 tag"); + assertEquals(1, rb.metrics().size(), "1 metric"); + assertEquals("foo", rb.tags().get(0).name(), "expect foo tag"); + assertEquals("c0", rb.metrics().get(0).name(), "expect c0"); } } diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java index 2ca1c8ad2cc35..f670a293cae35 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java @@ -20,8 +20,9 @@ import java.util.Map; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.commons.configuration2.Configuration; import org.slf4j.Logger; @@ -71,11 +72,11 @@ private void testInstances(MetricsConfig c) throws Exception { Map map = c.getInstanceConfigs("t1"); Map map2 = c.getInstanceConfigs("t2"); - assertEquals("number of t1 instances", 2, map.size()); - assertEquals("number of t2 instances", 1, map2.size()); - assertTrue("contains t1 instance i1", map.containsKey("i1")); - assertTrue("contains t1 instance 42", map.containsKey("42")); - assertTrue("contains t2 instance i1", map2.containsKey("i1")); + assertEquals(2, map.size(), "number of t1 instances"); + assertEquals(1, map2.size(), "number of t2 instances"); + assertTrue(map.containsKey("i1"), "contains t1 instance i1"); + assertTrue(map.containsKey("42"), "contains t1 instance 42"); + assertTrue(map2.containsKey("i1"), "contains t2 instance i1"); MetricsConfig t1i1 = map.get("i1"); MetricsConfig t1i42 = map.get("42"); @@ -97,16 +98,16 @@ private void testInstances(MetricsConfig c) throws Exception { LOG.debug("asserting foo == default foo"); // Check default lookups - assertEquals("value of foo in t1 instance i1", "default foo", - t1i1.getString("foo")); - assertEquals("value of bar in t1 instance i1", "p1.t1 default bar", - t1i1.getString("bar")); - assertEquals("value of foo in t1 instance 42", "default foo", - t1i42.getString("foo")); - assertEquals("value of foo in t2 instance i1", "p1.t2.i1.foo", - t2i1.getString("foo")); - assertEquals("value of bar in t2 instance i1", "p1 default bar", - t2i1.getString("bar")); + assertEquals("default foo", t1i1.getString("foo"), + "value of foo in t1 instance i1"); + assertEquals("p1.t1 default bar", t1i1.getString("bar"), + "value of bar in t1 instance i1"); + assertEquals("default foo", t1i42.getString("foo"), + "value of foo in t1 instance 42"); + assertEquals("p1.t2.i1.foo", t2i1.getString("foo"), + "value of foo in t2 instance i1"); + assertEquals("p1 default bar", t2i1.getString("bar"), + "value of bar in t2 instance i1"); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java index 0dabe468e49e3..2668321d9e7e3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java @@ -18,7 +18,9 @@ package org.apache.hadoop.metrics2.impl; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import 
java.util.ArrayList; import java.util.List; @@ -40,10 +42,9 @@ import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder; import org.apache.hadoop.metrics2.lib.MutableCounterLong; import static org.apache.hadoop.metrics2.lib.Interns.info; -import static org.junit.Assert.assertEquals; import org.apache.log4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; @@ -67,7 +68,7 @@ public void testPurgeOldMetrics() throws Exception { for (MBeanAttributeInfo mBeanAttributeInfo : info.getAttributes()) { sawIt |= mBeanAttributeInfo.getName().equals(source.lastKeyName); }; - assertTrue("The last generated metric is not exported to jmx", sawIt); + assertTrue(sawIt, "The last generated metric is not exported to jmx"); Thread.sleep(1000); // skip JMX cache TTL @@ -76,7 +77,7 @@ public void testPurgeOldMetrics() throws Exception { for (MBeanAttributeInfo mBeanAttributeInfo : info.getAttributes()) { sawIt |= mBeanAttributeInfo.getName().equals(source.lastKeyName); }; - assertTrue("The last generated metric is not exported to jmx", sawIt); + assertTrue(sawIt, "The last generated metric is not exported to jmx"); } //generate a new key per each call @@ -198,7 +199,7 @@ public void testMetricCacheUpdateRace() throws Exception { // Let the threads do their work. Thread.sleep(RACE_TEST_RUNTIME); - assertFalse("Hit error", hasError.get()); + assertFalse(hasError.get(), "Hit error"); // cleanup updaterExecutor.shutdownNow(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java index 1e841a686549c..e30f96ad1e4f8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java @@ -24,17 +24,29 @@ import java.util.concurrent.*; import java.util.concurrent.atomic.*; import java.util.stream.StreamSupport; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.invocation.InvocationOnMock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.stubbing.Answer; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; import java.util.function.Supplier; import org.apache.hadoop.thirdparty.com.google.common.collect.Iterables; @@ -42,7 +54,7 @@ import org.apache.commons.configuration2.SubsetConfiguration; import 
org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.test.GenericTestUtils; -import static org.apache.hadoop.test.MoreAsserts.*; +import org.apache.hadoop.test.MoreAsserts; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; @@ -63,7 +75,7 @@ /** * Test the MetricsSystemImpl class */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class TestMetricsSystemImpl { private static final Logger LOG = LoggerFactory.getLogger(TestMetricsSystemImpl.class); @@ -128,7 +140,7 @@ List> getMetricValues() { List mr2 = r2.getAllValues(); if (mr1.size() != 0 && mr2.size() != 0) { checkMetricsRecords(mr1); - assertEquals("output", mr1, mr2); + assertEquals(mr1, mr2, "output"); } else if (mr1.size() != 0) { checkMetricsRecords(mr1); } else if (mr2.size() != 0) { @@ -172,7 +184,7 @@ List> getMetricValues() { List mr1 = r1.getAllValues(); List mr2 = r2.getAllValues(); checkMetricsRecords(mr1); - assertEquals("output", mr1, mr2); + assertEquals(mr1, mr2, "output"); } @@ -242,9 +254,9 @@ public void run() { for (Thread t : threads) t.join(); assertEquals(0L, ms.droppedPubAll.value()); - assertTrue(String.join("\n", Arrays.asList(results)), - Arrays.asList(results).stream().allMatch( - input -> input.equalsIgnoreCase("Passed"))); + assertTrue(Arrays.asList(results).stream().allMatch( + input -> input.equalsIgnoreCase("Passed")), + String.join("\n", Arrays.asList(results))); ms.stop(); ms.shutdown(); } @@ -304,8 +316,8 @@ public void flush() { ms.stop(); ms.shutdown(); assertTrue(hanging.getInterrupted()); - assertTrue("The sink didn't get called after its first hang " + - "for subsequent records.", hanging.getGotCalledSecondTime()); + assertTrue(hanging.getGotCalledSecondTime(), + "The sink didn't get called after its first hang for subsequent records."); } private static class HangingSink implements MetricsSink { @@ -360,11 +372,14 @@ public void flush() { ms.shutdown(); } - @Test(expected=MetricsException.class) public void testRegisterDupError() { - MetricsSystem ms = new MetricsSystemImpl("test"); - TestSource ts = new TestSource("ts"); - ms.register(ts); - ms.register(ts); + @Test + public void testRegisterDupError() { + assertThrows(MetricsException.class, () -> { + MetricsSystem ms = new MetricsSystemImpl("test"); + TestSource ts = new TestSource("ts"); + ms.register(ts); + ms.register(ts); + }); } @Test public void testStartStopStart() { @@ -424,11 +439,11 @@ public void flush() { private void checkMetricsRecords(List recs) { LOG.debug(recs.toString()); MetricsRecord r = recs.get(0); - assertEquals("name", "s1rec", r.name()); - assertEquals("tags", new MetricsTag[] { + assertEquals("s1rec", r.name(), "name"); + MoreAsserts.assertEquals("tags", new MetricsTag[] { tag(MsInfo.Context, "test"), tag(MsInfo.Hostname, hostname)}, r.tags()); - assertEquals("metrics", MetricsLists.builder("") + MoreAsserts.assertEquals("metrics", MetricsLists.builder("") .addCounter(info("C1", "C1 desc"), 1L) .addGauge(info("G1", "G1 desc"), 2L) .addCounter(info("S1NumOps", "Number of ops for s1"), 1L) @@ -436,10 +451,10 @@ private void checkMetricsRecords(List recs) { .metrics(), r.metrics()); r = recs.get(1); - assertTrue("NumActiveSinks should be 3", Iterables.contains(r.metrics(), - new MetricGaugeInt(MsInfo.NumActiveSinks, 3))); - assertTrue("NumAllSinks should be 3", - Iterables.contains(r.metrics(), new MetricGaugeInt(MsInfo.NumAllSinks, 3))); + assertTrue(Iterables.contains(r.metrics(), new MetricGaugeInt(MsInfo.NumActiveSinks, 
3)), + "NumActiveSinks should be 3"); + assertTrue(Iterables.contains(r.metrics(), new MetricGaugeInt(MsInfo.NumAllSinks, 3)), + "NumAllSinks should be 3"); } @Test @@ -526,7 +541,8 @@ public void flush() { /** * HADOOP-11932 */ - @Test(timeout = 5000) + @Test + @Timeout(value = 5) public void testHangOnSinkRead() throws Exception { new ConfigBuilder().add("*.period", 8) .add("test.sink.test.class", TestSink.class.getName()) @@ -641,13 +657,13 @@ public void testMetricSystemRestart() { try { ms.start(); ms.register(sinkName, "", ts); - assertNotNull("no adapter exists for " + sinkName, - ms.getSinkAdapter(sinkName)); + assertNotNull(ms.getSinkAdapter(sinkName), + "no adapter exists for " + sinkName); ms.stop(); ms.start(); - assertNotNull("no adapter exists for " + sinkName, - ms.getSinkAdapter(sinkName)); + assertNotNull(ms.getSinkAdapter(sinkName), + "no adapter exists for " + sinkName); } finally { ms.stop(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java index d7614d2d0b2f4..b515e0f46a346 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsVisitor.java @@ -20,25 +20,28 @@ import java.util.List; -import org.junit.Test; -import static org.junit.Assert.*; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; -import org.mockito.junit.MockitoJUnitRunner; import org.apache.hadoop.metrics2.MetricsVisitor; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsInfo; import static org.apache.hadoop.metrics2.lib.Interns.*; import org.apache.hadoop.metrics2.lib.MetricsRegistry; +import org.mockito.junit.jupiter.MockitoExtension; /** * Test the metric visitor interface */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class TestMetricsVisitor { @Captor private ArgumentCaptor c1; @Captor private ArgumentCaptor c2; @@ -67,23 +70,23 @@ public class TestMetricsVisitor { } verify(visitor).counter(c1.capture(), eq(1)); - assertEquals("c1 name", "c1", c1.getValue().name()); - assertEquals("c1 description", "int counter", c1.getValue().description()); + assertEquals("c1", c1.getValue().name(), "c1 name"); + assertEquals("int counter", c1.getValue().description(), "c1 description"); verify(visitor).counter(c2.capture(), eq(2L)); - assertEquals("c2 name", "c2", c2.getValue().name()); - assertEquals("c2 description", "long counter", c2.getValue().description()); + assertEquals("c2", c2.getValue().name(), "c2 name"); + assertEquals("long counter", c2.getValue().description(), "c2 description"); verify(visitor).gauge(g1.capture(), eq(5)); - assertEquals("g1 name", "g1", g1.getValue().name()); - assertEquals("g1 description", "int gauge", g1.getValue().description()); + assertEquals("g1", g1.getValue().name(), "g1 name"); + assertEquals("int gauge", g1.getValue().description(), "g1 description"); verify(visitor).gauge(g2.capture(), eq(6L)); - 
assertEquals("g2 name", "g2", g2.getValue().name()); - assertEquals("g2 description", "long gauge", g2.getValue().description()); + assertEquals("g2", g2.getValue().name(), "g2 name"); + assertEquals("long gauge", g2.getValue().description(), "g2 description"); verify(visitor).gauge(g3.capture(), eq(7f)); - assertEquals("g3 name", "g3", g3.getValue().name()); - assertEquals("g3 description", "float gauge", g3.getValue().description()); + assertEquals("g3", g3.getValue().name(), "g3 name"); + assertEquals("float gauge", g3.getValue().description(), "g3 description"); verify(visitor).gauge(g4.capture(), eq(8d)); - assertEquals("g4 name", "g4", g4.getValue().name()); - assertEquals("g4 description", "double gauge", g4.getValue().description()); + assertEquals("g4", g4.getValue().name(), "g4 name"); + assertEquals("double gauge", g4.getValue().description(), "g4 description"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java index 719130f5ba910..33f9946e94d9e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java @@ -21,12 +21,17 @@ import java.util.ConcurrentModificationException; import java.util.concurrent.CountDownLatch; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.apache.hadoop.metrics2.impl.SinkQueue.*; @@ -44,21 +49,21 @@ public class TestSinkQueue { @Test public void testCommon() throws Exception { final SinkQueue q = new SinkQueue(2); q.enqueue(1); - assertEquals("queue front", 1, (int) q.front()); - assertEquals("queue back", 1, (int) q.back()); - assertEquals("element", 1, (int) q.dequeue()); + assertEquals(1, (int) q.front(), "queue front"); + assertEquals(1, (int) q.back(), "queue back"); + assertEquals(1, (int) q.dequeue(), "element"); - assertTrue("should enqueue", q.enqueue(2)); + assertTrue(q.enqueue(2), "should enqueue"); q.consume(new Consumer() { @Override public void consume(Integer e) { - assertEquals("element", 2, (int) e); + assertEquals(2, (int) e, "element"); } }); - assertTrue("should enqueue", q.enqueue(3)); - assertEquals("element", 3, (int) q.dequeue()); - assertEquals("queue size", 0, q.size()); - assertEquals("queue front", null, q.front()); - assertEquals("queue back", null, q.back()); + assertTrue(q.enqueue(3), "should enqueue"); + assertEquals(3, (int) q.dequeue(), "element"); + assertEquals(0, q.size(), "queue size"); + assertEquals(null, q.front(), "queue front"); + assertEquals(null, q.back(), "queue back"); } /** @@ -77,10 +82,10 @@ private void testEmptyBlocking(int awhile) throws Exception { Thread t = new Thread() { @Override public void run() { try { - assertEquals("element", 1, (int) q.dequeue()); + assertEquals(1, (int) q.dequeue(), "element"); q.consume(new Consumer() { @Override public void 
consume(Integer e) { - assertEquals("element", 2, (int) e); + assertEquals(2, (int) e, "element"); trigger.run(); } }); @@ -109,16 +114,16 @@ private void testEmptyBlocking(int awhile) throws Exception { final SinkQueue q = new SinkQueue(1); q.enqueue(1); - assertTrue("should drop", !q.enqueue(2)); - assertEquals("element", 1, (int) q.dequeue()); + assertTrue(!q.enqueue(2), "should drop"); + assertEquals(1, (int) q.dequeue(), "element"); q.enqueue(3); q.consume(new Consumer() { @Override public void consume(Integer e) { - assertEquals("element", 3, (int) e); + assertEquals(3, (int) e, "element"); } }); - assertEquals("queue size", 0, q.size()); + assertEquals(0, q.size(), "queue size"); } /** @@ -130,15 +135,15 @@ private void testEmptyBlocking(int awhile) throws Exception { final SinkQueue q = new SinkQueue(capacity); for (int i = 0; i < capacity; ++i) { - assertTrue("should enqueue", q.enqueue(i)); + assertTrue(q.enqueue(i), "should enqueue"); } - assertTrue("should not enqueue", !q.enqueue(capacity)); + assertTrue(!q.enqueue(capacity), "should not enqueue"); final Runnable trigger = mock(Runnable.class); q.consumeAll(new Consumer() { private int expected = 0; @Override public void consume(Integer e) { - assertEquals("element", expected++, (int) e); + assertEquals(expected++, (int) e, "element"); trigger.run(); } }); @@ -163,11 +168,11 @@ private void testEmptyBlocking(int awhile) throws Exception { }); } catch (Exception expected) { - assertSame("consumer exception", ex, expected); + assertSame(ex, expected, "consumer exception"); } // The queue should be in consistent state after exception - assertEquals("queue size", 1, q.size()); - assertEquals("element", 1, (int) q.dequeue()); + assertEquals(1, q.size(), "queue size"); + assertEquals(1, (int) q.dequeue(), "element"); } /** @@ -178,9 +183,9 @@ private void testEmptyBlocking(int awhile) throws Exception { for (int i = 0; i < q.capacity() + 97; ++i) { q.enqueue(i); } - assertEquals("queue size", q.capacity(), q.size()); + assertEquals(q.capacity(), q.size(), "queue size"); q.clear(); - assertEquals("queue size", 0, q.size()); + assertEquals(0, q.size(), "queue size"); } /** @@ -189,11 +194,11 @@ private void testEmptyBlocking(int awhile) throws Exception { */ @Test public void testHangingConsumer() throws Exception { SinkQueue q = newSleepingConsumerQueue(2, 1, 2); - assertEquals("queue back", 2, (int) q.back()); - assertTrue("should drop", !q.enqueue(3)); // should not block - assertEquals("queue size", 2, q.size()); - assertEquals("queue head", 1, (int) q.front()); - assertEquals("queue back", 2, (int) q.back()); + assertEquals(2, (int) q.back(), "queue back"); + assertTrue(!q.enqueue(3), "should drop"); // should not block + assertEquals(2, q.size(), "queue size"); + assertEquals(1, (int) q.front(), "queue head"); + assertEquals(2, (int) q.back(), "queue back"); } /** @@ -202,9 +207,9 @@ private void testEmptyBlocking(int awhile) throws Exception { */ @Test public void testConcurrentConsumers() throws Exception { final SinkQueue q = newSleepingConsumerQueue(2, 1); - assertTrue("should enqueue", q.enqueue(2)); - assertEquals("queue back", 2, (int) q.back()); - assertTrue("should drop", !q.enqueue(3)); // should not block + assertTrue(q.enqueue(2), "should enqueue"); + assertEquals(2, (int) q.back(), "queue back"); + assertTrue(!q.enqueue(3), "should drop"); // should not block shouldThrowCME(new Fun() { @Override public void run() { q.clear(); @@ -226,9 +231,9 @@ private void testEmptyBlocking(int awhile) throws Exception { } }); // 
The queue should still be in consistent state after all the exceptions - assertEquals("queue size", 2, q.size()); - assertEquals("queue front", 1, (int) q.front()); - assertEquals("queue back", 2, (int) q.back()); + assertEquals(2, q.size(), "queue size"); + assertEquals(1, (int) q.front(), "queue front"); + assertEquals(2, (int) q.back(), "queue back"); } private void shouldThrowCME(Fun callback) throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestInterns.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestInterns.java index 74d073d826e3b..f5e463894144d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestInterns.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestInterns.java @@ -18,8 +18,9 @@ package org.apache.hadoop.metrics2.lib; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertSame; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsTag; @@ -29,12 +30,12 @@ public class TestInterns { @Test public void testInfo() { MetricsInfo info = info("m", "m desc"); - assertSame("same info", info, info("m", "m desc")); + assertSame(info, info("m", "m desc"), "same info"); } @Test public void testTag() { MetricsTag tag = tag("t", "t desc", "t value"); - assertSame("same tag", tag, tag("t", "t desc", "t value")); + assertSame(tag, tag("t", "t desc", "t value"), "same tag"); } @Test public void testInfoOverflow() { @@ -42,19 +43,19 @@ public class TestInterns { for (int i = 0; i < MAX_INFO_NAMES + 1; ++i) { info("m"+ i, "m desc"); if (i < MAX_INFO_NAMES) { - assertSame("m0 is still there", i0, info("m0", "m desc")); + assertSame(i0, info("m0", "m desc"), "m0 is still there"); } } - assertNotSame("m0 is gone", i0, info("m0", "m desc")); + assertNotSame(i0, info("m0", "m desc"), "m0 is gone"); MetricsInfo i1 = info("m1", "m desc"); for (int i = 0; i < MAX_INFO_DESCS; ++i) { info("m1", "m desc"+ i); if (i < MAX_INFO_DESCS - 1) { - assertSame("i1 is still there", i1, info("m1", "m desc")); + assertSame(i1, info("m1", "m desc"), "i1 is still there"); } } - assertNotSame("i1 is gone", i1, info("m1", "m desc")); + assertNotSame(i1, info("m1", "m desc"), "i1 is gone"); } @Test public void testTagOverflow() { @@ -62,18 +63,18 @@ public class TestInterns { for (int i = 0; i < MAX_TAG_NAMES + 1; ++i) { tag("t"+ i, "t desc", "t value"); if (i < MAX_TAG_NAMES) { - assertSame("t0 still there", t0, tag("t0", "t desc", "t value")); + assertSame(t0, tag("t0", "t desc", "t value"), "t0 still there"); } } - assertNotSame("t0 is gone", t0, tag("t0", "t desc", "t value")); + assertNotSame(t0, tag("t0", "t desc", "t value"), "t0 is gone"); MetricsTag t1 = tag("t1", "t desc", "t value"); for (int i = 0; i < MAX_TAG_VALUES; ++i) { tag("t1", "t desc", "t value"+ i); if (i < MAX_TAG_VALUES -1) { - assertSame("t1 is still there", t1, tag("t1", "t desc", "t value")); + assertSame(t1, tag("t1", "t desc", "t value"), "t1 is still there"); } } - assertNotSame("t1 is gone", t1, tag("t1", "t desc", "t value")); + assertNotSame(t1, tag("t1", "t desc", "t value"), "t1 is gone"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java index 00c216590a8c5..b924c2af0ea38 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java @@ -18,9 +18,10 @@ package org.apache.hadoop.metrics2.lib; -import org.junit.Test; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.verify; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; @@ -79,8 +80,10 @@ static class BadMetrics { @Metric Integer i0; } - @Test(expected=MetricsException.class) public void testBadFields() { - MetricsAnnotations.makeSource(new BadMetrics()); + @Test + public void testBadFields() { + assertThrows(MetricsException.class, () -> + MetricsAnnotations.makeSource(new BadMetrics())); } static class MyMetrics2 { @@ -111,18 +114,20 @@ static class BadMetrics2 { @Metric int foo(int i) { return i; } } - @Test(expected=IllegalArgumentException.class) + @Test public void testBadMethodWithArgs() { - MetricsAnnotations.makeSource(new BadMetrics2()); + assertThrows(IllegalArgumentException.class, + ()-> MetricsAnnotations.makeSource(new BadMetrics2())); } static class BadMetrics3 { @Metric boolean foo() { return true; } } - @Test(expected=MetricsException.class) + @Test public void testBadMethodReturnType() { - MetricsAnnotations.makeSource(new BadMetrics3()); + assertThrows(MetricsException.class, + ()-> MetricsAnnotations.makeSource(new BadMetrics3())); } @Metrics(about="My metrics", context="foo") @@ -191,15 +196,19 @@ public void getMetrics(MetricsCollector collector, boolean all) { } } - @Test(expected=MetricsException.class) public void testBadHybrid() { - MetricsAnnotations.makeSource(new BadHybridMetrics()); + @Test + public void testBadHybrid() { + assertThrows(MetricsException.class, + ()-> MetricsAnnotations.makeSource(new BadHybridMetrics())); } static class EmptyMetrics { int foo; } - @Test(expected=MetricsException.class) public void testEmptyMetrics() { - MetricsAnnotations.makeSource(new EmptyMetrics()); + @Test + public void testEmptyMetrics() { + assertThrows(MetricsException.class, ()-> + MetricsAnnotations.makeSource(new EmptyMetrics())); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java index 73ea43f69adb4..ef1c6581bccf7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java @@ -18,10 +18,12 @@ package org.apache.hadoop.metrics2.lib; -import org.junit.Ignore; -import org.junit.Test; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; 
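The @Test(expected = MetricsException.class) conversions in the TestMetricsAnnotations hunks above all follow the same JUnit 5 idiom: the expectation moves out of the annotation and into an assertThrows call in the test body, which also exposes the thrown exception for further checks. A minimal, self-contained sketch of that pattern, using a hypothetical parsePositive helper that is not part of this patch:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

public class AssertThrowsPatternTest {

  // Hypothetical helper, used only to demonstrate the conversion.
  private static int parsePositive(String s) {
    int value = Integer.parseInt(s);
    if (value < 0) {
      throw new IllegalArgumentException("negative: " + value);
    }
    return value;
  }

  // JUnit 4 style:
  //   @Test(expected = IllegalArgumentException.class)
  //   public void testNegativeRejected() { parsePositive("-1"); }
  //
  // JUnit 5 style: the expectation moves into the test body, and the
  // returned exception can be inspected afterwards.
  @Test
  public void testNegativeRejected() {
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
        () -> parsePositive("-1"));
    assertEquals("negative: -1", e.getMessage());
  }
}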
+import static org.mockito.Mockito.verify; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; @@ -45,13 +47,13 @@ public class TestMetricsRegistry { r.newGauge("g3", "g3 desc", 5f); r.newStat("s1", "s1 desc", "ops", "time"); - assertEquals("num metrics in registry", 6, r.metrics().size()); - assertTrue("c1 found", r.get("c1") instanceof MutableCounterInt); - assertTrue("c2 found", r.get("c2") instanceof MutableCounterLong); - assertTrue("g1 found", r.get("g1") instanceof MutableGaugeInt); - assertTrue("g2 found", r.get("g2") instanceof MutableGaugeLong); - assertTrue("g3 found", r.get("g3") instanceof MutableGaugeFloat); - assertTrue("s1 found", r.get("s1") instanceof MutableStat); + assertEquals(6, r.metrics().size(), "num metrics in registry"); + assertTrue(r.get("c1") instanceof MutableCounterInt, "c1 found"); + assertTrue(r.get("c2") instanceof MutableCounterLong, "c2 found"); + assertTrue(r.get("g1") instanceof MutableGaugeInt, "g1 found"); + assertTrue(r.get("g2") instanceof MutableGaugeLong, "g2 found"); + assertTrue(r.get("g3") instanceof MutableGaugeFloat, "g3 found"); + assertTrue(r.get("s1") instanceof MutableStat, "s1 found"); expectMetricsException("Metric name c1 already exists", new Runnable() { @Override @@ -96,7 +98,7 @@ public void testMetricsRegistryIllegalMetricNames() { public void run() { r.newCounter("withnewline6\n", "c6 desc", 6); } }); // Final validation - assertEquals("num metrics in registry", 3, r.metrics().size()); + assertEquals(3, r.metrics().size(), "num metrics in registry"); } /** @@ -140,13 +142,13 @@ public void run() { }); } - @Ignore + @Disabled private void expectMetricsException(String prefix, Runnable fun) { try { fun.run(); } catch (MetricsException e) { - assertTrue("expected exception", e.getMessage().startsWith(prefix)); + assertTrue(e.getMessage().startsWith(prefix), "expected exception"); return; } fail("should've thrown '"+ prefix +"...'"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java index 1ebc0cbdbf23d..f423b57d1c3e2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java @@ -21,6 +21,7 @@ import static org.apache.hadoop.metrics2.impl.MsInfo.Context; import static org.apache.hadoop.metrics2.lib.Interns.info; import static org.apache.hadoop.test.MetricsAsserts.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.AdditionalMatchers.eq; import static org.mockito.AdditionalMatchers.geq; import static org.mockito.AdditionalMatchers.leq; @@ -28,7 +29,6 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.junit.Assert.*; import java.util.ArrayList; import java.util.List; @@ -40,7 +40,8 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.util.Quantile; import org.apache.hadoop.thirdparty.com.google.common.math.Stats; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -264,8 +265,8 @@ public void run() { // the totals are as expected 
snapshotMutableRatesWithAggregation(rates, opCount, opTotalTime); for (int i = 0; i < n; i++) { - assertEquals("metric" + i + " count", 1001, opCount[i]); - assertEquals("metric" + i + " total", 1500, opTotalTime[i], 1.0); + assertEquals(1001, opCount[i], "metric" + i + " count"); + assertEquals(1500, opTotalTime[i], 1.0, "metric" + i + " total"); } firstSnapshotsFinished.countDown(); @@ -274,8 +275,8 @@ public void run() { secondAddsFinished.await(); snapshotMutableRatesWithAggregation(rates, opCount, opTotalTime); for (int i = 0; i < n; i++) { - assertEquals("metric" + i + " count", 1501, opCount[i]); - assertEquals("metric" + i + " total", 2250, opTotalTime[i], 1.0); + assertEquals(1501, opCount[i], "metric" + i + " count"); + assertEquals(2250, opTotalTime[i], 1.0, "metric" + i + " total"); } secondSnapshotsFinished.countDown(); } @@ -402,7 +403,8 @@ public void testLargeMutableStatAdd() { * Ensure that quantile estimates from {@link MutableQuantiles} are within * specified error bounds. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableQuantilesError() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -448,7 +450,8 @@ public void testMutableQuantilesError() throws Exception { * Ensure that quantile estimates from {@link MutableInverseQuantiles} are within * specified error bounds. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableInverseQuantilesError() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -488,7 +491,8 @@ public void testMutableInverseQuantilesError() throws Exception { * Test that {@link MutableQuantiles} rolls the window over at the specified * interval. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableQuantilesRollover() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -536,7 +540,8 @@ public void testMutableQuantilesRollover() throws Exception { * Test that {@link MutableInverseQuantiles} rolls the window over at the specified * interval. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableInverseQuantilesRollover() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -585,7 +590,8 @@ public void testMutableInverseQuantilesRollover() throws Exception { * Test that {@link MutableQuantiles} rolls over correctly even if no items. * have been added to the window */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableQuantilesEmptyRollover() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -607,7 +613,8 @@ public void testMutableQuantilesEmptyRollover() throws Exception { * Test that {@link MutableInverseQuantiles} rolls over correctly even if no items * have been added to the window */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableInverseQuantilesEmptyRollover() throws Exception { MetricsRecordBuilder mb = mockMetricsRecordBuilder(); MetricsRegistry registry = new MetricsRegistry("test"); @@ -628,7 +635,8 @@ public void testMutableInverseQuantilesEmptyRollover() throws Exception { /** * Test {@link MutableGaugeFloat#incr()}. 
*/ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableGaugeFloat() { MutableGaugeFloat mgf = new MutableGaugeFloat(Context, 3.2f); assertEquals(3.2f, mgf.value(), 0.0); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java index ad90c1860514a..2ec70c486dd57 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java @@ -23,17 +23,20 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Time; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.apache.hadoop.metrics2.lib.Interns.info; import static org.apache.hadoop.test.MetricsAsserts.*; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.anyDouble; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; /** * This class tests various cases of the algorithms implemented in @@ -45,7 +48,8 @@ public class TestMutableRollingAverages { * Tests if the results are correct if no samples are inserted, dry run of * empty roll over. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testRollingAveragesEmptyRollover() throws Exception { final MetricsRecordBuilder rb = mockMetricsRecordBuilder(); /* 5s interval and 2 windows */ @@ -79,7 +83,8 @@ public void testRollingAveragesEmptyRollover() throws Exception { * 2...2] and [3, 3...3] *

*/ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testRollingAveragesRollover() throws Exception { final MetricsRecordBuilder rb = mockMetricsRecordBuilder(); final String name = "foo2"; @@ -135,7 +140,8 @@ public void testRollingAveragesRollover() throws Exception { * initialization. * @throws Exception */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testMutableRollingAveragesMetric() throws Exception { DummyTestMetric testMetric = new DummyTestMetric(); testMetric.create(); @@ -157,10 +163,8 @@ public Boolean get() { double metric1Avg = getDoubleGauge("[Metric1]RollingAvgTesting", rb); double metric2Avg = getDoubleGauge("[Metric2]RollingAvgTesting", rb); - Assert.assertTrue("The rolling average of metric1 is not as expected", - metric1Avg == 500.0); - Assert.assertTrue("The rolling average of metric2 is not as expected", - metric2Avg == 1000.0); + assertTrue(metric1Avg == 500.0, "The rolling average of metric1 is not as expected"); + assertTrue(metric2Avg == 1000.0, "The rolling average of metric2 is not as expected"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestUniqNames.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestUniqNames.java index fb09ed2465e7d..94b13f0ec47ed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestUniqNames.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestUniqNames.java @@ -18,8 +18,8 @@ package org.apache.hadoop.metrics2.lib; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestUniqNames { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java index 420c16bef577e..3b4c80ed1b685 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java @@ -35,9 +35,10 @@ import org.apache.hadoop.metrics2.impl.MetricsSystemImpl; import org.apache.hadoop.metrics2.impl.TestMetricsConfig; import org.apache.hadoop.metrics2.lib.MutableGaugeInt; -import org.junit.After; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestFileSink { @@ -81,7 +82,8 @@ private File getTestTempFile(String prefix, String suffix) throws IOException { return File.createTempFile(prefix, suffix, dir); } - @Test(timeout=6000) + @Test + @Timeout(value = 6) public void testFileSink() throws IOException { outFile = getTestTempFile("test-file-sink-", ".out"); final String outPath = outFile.getAbsolutePath(); @@ -136,7 +138,7 @@ public void testFileSink() throws IOException { assertTrue(expectedContentPattern.matcher(outFileContent).matches()); } - @After + @AfterEach public void after() { if (outFile != null) { outFile.delete(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestGraphiteMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestGraphiteMetrics.java 
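The timeout conversions above (for example @Test(timeout = 6000) in TestFileSink becoming @Timeout(value = 6)) rely on JUnit 5's @Timeout defaulting to seconds rather than milliseconds. A small sketch of the two equivalent forms, with a hypothetical test method as the example:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TimeoutPatternTest {

  // JUnit 4: @Test(timeout = 6000) -- the value is in milliseconds.
  // JUnit 5: @Timeout defaults to seconds, so 6000 ms becomes value = 6.
  @Test
  @Timeout(value = 6)
  public void testCompletesQuickly() throws InterruptedException {
    Thread.sleep(10); // finishes well inside the 6 second budget
  }

  // The unit can also be stated explicitly if a millisecond budget is wanted.
  @Test
  @Timeout(value = 6000, unit = TimeUnit.MILLISECONDS)
  public void testCompletesQuicklyExplicitUnit() throws InterruptedException {
    Thread.sleep(10);
  }
}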
index 9ea81c6e4c62e..51d51bed1173e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestGraphiteMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestGraphiteMetrics.java @@ -23,7 +23,7 @@ import org.apache.hadoop.metrics2.MetricsTag; import org.apache.hadoop.metrics2.impl.MetricsRecordImpl; import org.apache.hadoop.metrics2.impl.MsInfo; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import java.io.IOException; @@ -33,7 +33,7 @@ import java.util.Set; import java.util.Collections; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestPrometheusMetricsSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestPrometheusMetricsSink.java index 50c77e135ec40..5abdff6c142c0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestPrometheusMetricsSink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestPrometheusMetricsSink.java @@ -35,11 +35,13 @@ import org.apache.hadoop.metrics2.lib.Interns; import org.apache.hadoop.metrics2.lib.MutableCounterLong; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static java.nio.charset.StandardCharsets.UTF_8; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test prometheus Sink. 
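The assertion rewrites throughout this patch, including the TestPrometheusMetricsSink hunks below, change only the argument order: org.junit.Assert takes the failure message as the first parameter, while org.junit.jupiter.api.Assertions takes it as the last. A minimal sketch of the before/after shape, with a hypothetical metrics string standing in for the real sink output:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;

public class MessageLastPatternTest {

  @Test
  public void testMessageIsLastArgument() {
    // Hypothetical sink output, standing in for the real prometheus text.
    String writtenMetrics = "test_metrics_num_bucket_create_fails{context=\"dfs\"} 1";

    // JUnit 4 (org.junit.Assert): the failure message comes first:
    //   assertTrue("expected metric line missing",
    //       writtenMetrics.contains("context=\"dfs\""));
    // JUnit 5 (org.junit.jupiter.api.Assertions): the message comes last:
    assertTrue(writtenMetrics.contains("context=\"dfs\""),
        "expected metric line missing");
    assertEquals(1, writtenMetrics.split("\n").length,
        "expected exactly one metric line");
  }
}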
@@ -69,11 +71,8 @@ public void testPublish() throws IOException { //THEN String writtenMetrics = stream.toString(UTF_8.name()); System.out.println(writtenMetrics); - Assert.assertTrue( - "The expected metric line is missing from prometheus metrics output", - writtenMetrics.contains( - "test_metrics_num_bucket_create_fails{context=\"dfs\"") - ); + assertTrue(writtenMetrics.contains("test_metrics_num_bucket_create_fails{context=\"dfs\""), + "The expected metric line is missing from prometheus metrics output"); metrics.unregisterSource("TestMetrics"); metrics.stop(); @@ -110,16 +109,12 @@ public void testPublishMultiple() throws IOException { //THEN String writtenMetrics = stream.toString(UTF_8.name()); System.out.println(writtenMetrics); - Assert.assertTrue( - "The expected first metric line is missing from prometheus metrics output", - writtenMetrics.contains( - "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue1\"") - ); - Assert.assertTrue( - "The expected second metric line is missing from prometheus metrics output", - writtenMetrics.contains( - "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue2\"") - ); + assertTrue(writtenMetrics.contains( + "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue1\""), + "The expected first metric line is missing from prometheus metrics output"); + assertTrue(writtenMetrics.contains( + "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue2\""), + "The expected second metric line is missing from prometheus metrics output"); metrics.unregisterSource("TestMetrics1"); metrics.unregisterSource("TestMetrics2"); @@ -161,16 +156,12 @@ public void testPublishFlush() throws IOException { //THEN String writtenMetrics = stream.toString(UTF_8.name()); System.out.println(writtenMetrics); - Assert.assertFalse( - "The first metric should not exist after flushing", - writtenMetrics.contains( - "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue1\"") - ); - Assert.assertTrue( - "The expected metric line is missing from prometheus metrics output", - writtenMetrics.contains( - "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue2\"") - ); + assertFalse(writtenMetrics.contains( + "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue1\""), + "The first metric should not exist after flushing"); + assertTrue(writtenMetrics.contains( + "test_metrics_num_bucket_create_fails{context=\"dfs\",testtag=\"testTagValue2\""), + "The expected metric line is missing from prometheus metrics output"); metrics.unregisterSource("TestMetrics"); metrics.stop(); @@ -181,13 +172,13 @@ public void testPublishFlush() throws IOException { public void testNamingCamelCase() { PrometheusMetricsSink sink = new PrometheusMetricsSink(); - Assert.assertEquals("rpc_time_some_metrics", + assertEquals("rpc_time_some_metrics", sink.prometheusName("RpcTime", "SomeMetrics")); - Assert.assertEquals("om_rpc_time_om_info_keys", + assertEquals("om_rpc_time_om_info_keys", sink.prometheusName("OMRpcTime", "OMInfoKeys")); - Assert.assertEquals("rpc_time_small", + assertEquals("rpc_time_small", sink.prometheusName("RpcTime", "small")); } @@ -198,7 +189,7 @@ public void testNamingPipeline() { String recordName = "SCMPipelineMetrics"; String metricName = "NumBlocksAllocated-" + "RATIS-THREE-47659e3d-40c9-43b3-9792-4982fc279aba"; - Assert.assertEquals( + assertEquals( "scm_pipeline_metrics_" + "num_blocks_allocated_" + 
"ratis_three_47659e3d_40c9_43b3_9792_4982fc279aba", @@ -211,7 +202,7 @@ public void testNamingPeriods() { String recordName = "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl"; String metricName = "DfsUsed"; - Assert.assertEquals( + assertEquals( "org_apache_hadoop_hdfs_server_datanode_fsdataset_impl_fs_dataset_impl_dfs_used", sink.prometheusName(recordName, metricName)); } @@ -222,7 +213,7 @@ public void testNamingWhitespaces() { String recordName = "JvmMetrics"; String metricName = "GcCount" + "G1 Old Generation"; - Assert.assertEquals( + assertEquals( "jvm_metrics_gc_count_g1_old_generation", sink.prometheusName(recordName, metricName)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestRollingFileSystemSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestRollingFileSystemSink.java index ac5a0be75eb17..c7623f3db7516 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestRollingFileSystemSink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestRollingFileSystemSink.java @@ -24,11 +24,11 @@ import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.impl.ConfigBuilder; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * Test that the init() method picks up all the configuration settings @@ -51,18 +51,15 @@ public void testInit() { sink.init(conf); - assertEquals("The roll interval was not set correctly", - sink.rollIntervalMillis, 600000); - assertEquals("The roll offset interval was not set correctly", - sink.rollOffsetIntervalMillis, 1); - assertEquals("The base path was not set correctly", - sink.basePath, new Path("path")); - assertEquals("ignore-error was not set correctly", - sink.ignoreError, true); - assertEquals("allow-append was not set correctly", - sink.allowAppend, true); - assertEquals("The source was not set correctly", - sink.source, "src"); + assertEquals(sink.rollIntervalMillis, 600000, + "The roll interval was not set correctly"); + assertEquals(sink.rollOffsetIntervalMillis, 1, + "The roll offset interval was not set correctly"); + assertEquals(sink.basePath, new Path("path"), + "The base path was not set correctly"); + assertEquals(sink.ignoreError, true, "ignore-error was not set correctly"); + assertEquals(sink.allowAppend, true, "allow-append was not set correctly"); + assertEquals(sink.source, "src", "The source was not set correctly"); } /** @@ -80,36 +77,36 @@ public void testSetInitialFlushTime() { calendar.set(Calendar.DAY_OF_YEAR, 1); calendar.set(Calendar.YEAR, 2016); - assertNull("Last flush time should have been null prior to calling init()", - rfsSink.nextFlush); + assertNull( + rfsSink.nextFlush, "Last flush time should have been null prior to calling init()"); rfsSink.setInitialFlushTime(calendar.getTime()); long diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertEquals("The initial flush time was calculated incorrectly", 0L, diff); + assertEquals(0L, diff, "The initial flush time 
was calculated incorrectly"); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertEquals("The initial flush time was calculated incorrectly", - -10L, diff); + assertEquals( + -10L, diff, "The initial flush time was calculated incorrectly"); calendar.set(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertEquals("The initial flush time was calculated incorrectly", - -10L, diff); + assertEquals( + -10L, diff, "The initial flush time was calculated incorrectly"); // Try again with a random offset rfsSink = new RollingFileSystemSink(1000, 100); - assertNull("Last flush time should have been null prior to calling init()", - rfsSink.nextFlush); + assertNull( + rfsSink.nextFlush, "Last flush time should have been null prior to calling init()"); calendar.set(Calendar.MILLISECOND, 0); calendar.set(Calendar.SECOND, 0); @@ -117,29 +114,29 @@ public void testSetInitialFlushTime() { diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertTrue("The initial flush time was calculated incorrectly: " + diff, - (diff == 0L) || ((diff > -1000L) && (diff < -900L))); + assertTrue((diff == 0L) || ((diff > -1000L) && (diff < -900L)), + "The initial flush time was calculated incorrectly: " + diff); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertTrue("The initial flush time was calculated incorrectly: " + diff, - (diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L))); + assertTrue((diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L)), + "The initial flush time was calculated incorrectly: " + diff); calendar.set(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertTrue("The initial flush time was calculated incorrectly: " + diff, - (diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L))); + assertTrue((diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L)), + "The initial flush time was calculated incorrectly: " + diff); // Now try pathological settings rfsSink = new RollingFileSystemSink(1000, 1000000); - assertNull("Last flush time should have been null prior to calling init()", - rfsSink.nextFlush); + assertNull(rfsSink.nextFlush, + "Last flush time should have been null prior to calling init()"); calendar.set(Calendar.MILLISECOND, 1); calendar.set(Calendar.SECOND, 0); @@ -147,8 +144,8 @@ public void testSetInitialFlushTime() { diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); - assertTrue("The initial flush time was calculated incorrectly: " + diff, - (diff > -1000L) && (diff <= 0L)); + assertTrue((diff > -1000L) && (diff <= 0L), + "The initial flush time was calculated incorrectly: " + diff); } /** @@ -170,26 +167,26 @@ public void testUpdateRollTime() { rfsSink.nextFlush.setTime(calendar.getTime()); rfsSink.updateFlushTime(calendar.getTime()); - assertEquals("The next roll time should have been 1 second in the future", - calendar.getTimeInMillis() + 1000, - rfsSink.nextFlush.getTimeInMillis()); + assertEquals(calendar.getTimeInMillis() + 1000, + rfsSink.nextFlush.getTimeInMillis(), + "The next 
roll time should have been 1 second in the future"); rfsSink.nextFlush.setTime(calendar.getTime()); calendar.add(Calendar.MILLISECOND, 10); rfsSink.updateFlushTime(calendar.getTime()); - assertEquals("The next roll time should have been 990 ms in the future", - calendar.getTimeInMillis() + 990, - rfsSink.nextFlush.getTimeInMillis()); + assertEquals(calendar.getTimeInMillis() + 990, + rfsSink.nextFlush.getTimeInMillis(), + "The next roll time should have been 990 ms in the future"); rfsSink.nextFlush.setTime(calendar.getTime()); calendar.add(Calendar.SECOND, 2); calendar.add(Calendar.MILLISECOND, 10); rfsSink.updateFlushTime(calendar.getTime()); - assertEquals("The next roll time should have been 990 ms in the future", - calendar.getTimeInMillis() + 990, - rfsSink.nextFlush.getTimeInMillis()); + assertEquals(calendar.getTimeInMillis() + 990, + rfsSink.nextFlush.getTimeInMillis(), + "The next roll time should have been 990 ms in the future"); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java index 2ce02f74f196d..4e813ceaa5af4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java @@ -18,7 +18,7 @@ package org.apache.hadoop.metrics2.sink; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -38,7 +38,8 @@ import org.apache.hadoop.metrics2.impl.MetricsRecordImpl; import org.apache.hadoop.metrics2.impl.MsInfo; import org.apache.hadoop.metrics2.sink.StatsDSink.StatsD; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestStatsDMetrics { @@ -51,7 +52,8 @@ private AbstractMetric makeMetric(String name, Number value, return metric; } - @Test(timeout=3000) + @Test + @Timeout(value = 3) public void testPutMetrics() throws IOException, IllegalAccessException { final StatsDSink sink = new StatsDSink(); List tags = new ArrayList(); @@ -76,17 +78,17 @@ public void testPutMetrics() throws IOException, IllegalAccessException { String result =new String(p.getData(), 0, p.getLength(), StandardCharsets.UTF_8); - assertTrue( - "Received data did not match data sent", - result.equals("host.process.jvm.Context.foo1:1.25|c") || - result.equals("host.process.jvm.Context.foo2:2.25|g")); + assertTrue(result.equals("host.process.jvm.Context.foo1:1.25|c") || + result.equals("host.process.jvm.Context.foo2:2.25|g"), + "Received data did not match data sent"); } finally { sink.close(); } } - @Test(timeout=3000) + @Test + @Timeout(value = 3) public void testPutMetrics2() throws IOException, IllegalAccessException { StatsDSink sink = new StatsDSink(); List tags = new ArrayList(); @@ -111,9 +113,9 @@ public void testPutMetrics2() throws IOException, IllegalAccessException { String result = new String(p.getData(), 0, p.getLength(), StandardCharsets.UTF_8); - assertTrue("Received data did not match data sent", - result.equals("process.jvm.Context.foo1:1|c") || - result.equals("process.jvm.Context.foo2:2|g")); + assertTrue(result.equals("process.jvm.Context.foo1:1|c") || + result.equals("process.jvm.Context.foo2:2|g"), + "Received data did not match data sent"); } finally { 
sink.close(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaSink.java index 59ba18803f6bc..3a8c1dcd1083e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaSink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/ganglia/TestGangliaSink.java @@ -21,14 +21,14 @@ import org.apache.commons.configuration2.SubsetConfiguration; import org.apache.hadoop.metrics2.impl.ConfigBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.net.DatagramSocket; import java.net.MulticastSocket; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestGangliaSink { @Test @@ -38,8 +38,8 @@ public void testShouldCreateDatagramSocketByDefault() throws Exception { GangliaSink30 gangliaSink = new GangliaSink30(); gangliaSink.init(conf); DatagramSocket socket = gangliaSink.getDatagramSocket(); - assertFalse("Did not create DatagramSocket", - socket == null || socket instanceof MulticastSocket); + assertFalse(socket == null || socket instanceof MulticastSocket, + "Did not create DatagramSocket"); } @Test @@ -49,8 +49,8 @@ public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Excepti GangliaSink30 gangliaSink = new GangliaSink30(); gangliaSink.init(conf); DatagramSocket socket = gangliaSink.getDatagramSocket(); - assertFalse("Did not create DatagramSocket", - socket == null || socket instanceof MulticastSocket); + assertFalse(socket == null || socket instanceof MulticastSocket, + "Did not create DatagramSocket"); } @Test @@ -60,10 +60,10 @@ public void testShouldCreateMulticastSocket() throws Exception { GangliaSink30 gangliaSink = new GangliaSink30(); gangliaSink.init(conf); DatagramSocket socket = gangliaSink.getDatagramSocket(); - assertTrue("Did not create MulticastSocket", - socket != null && socket instanceof MulticastSocket); + assertTrue(socket != null && socket instanceof MulticastSocket, + "Did not create MulticastSocket"); int ttl = ((MulticastSocket) socket).getTimeToLive(); - assertEquals("Did not set default TTL", 1, ttl); + assertEquals(1, ttl, "Did not set default TTL"); } @Test @@ -73,10 +73,10 @@ public void testShouldSetMulticastSocketTtl() throws Exception { GangliaSink30 gangliaSink = new GangliaSink30(); gangliaSink.init(conf); DatagramSocket socket = gangliaSink.getDatagramSocket(); - assertTrue("Did not create MulticastSocket", - socket != null && socket instanceof MulticastSocket); + assertTrue(socket != null && socket instanceof MulticastSocket, + "Did not create MulticastSocket"); int ttl = ((MulticastSocket) socket).getTimeToLive(); - assertEquals("Did not set TTL", 3, ttl); + assertEquals(3, ttl, "Did not set TTL"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java index 5eca1296994cc..2110f33981dde 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java 
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java
@@ -20,13 +20,16 @@ import org.apache.hadoop.metrics2.impl.MetricsCollectorImpl;
 import org.apache.hadoop.util.GcTimeMonitor;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.anyFloat;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.verify;
 import static org.apache.hadoop.test.MetricsAsserts.*;
 import org.apache.hadoop.conf.Configuration;
@@ -36,26 +39,25 @@ import org.apache.hadoop.service.ServiceStateException;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.JvmPauseMonitor;
+import org.junit.jupiter.api.Timeout;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.*;
 import static org.apache.hadoop.metrics2.impl.MsInfo.*;
+@Timeout(30)
 public class TestJvmMetrics {
-  @Rule
-  public Timeout timeout = new Timeout(30000, TimeUnit.MILLISECONDS);
   private JvmPauseMonitor pauseMonitor;
   private GcTimeMonitor gcTimeMonitor;
   /**
    * Robust shutdown of the monitors if they haven't been stopped already.
    */
-  @After
+  @AfterEach
   public void teardown() {
     ServiceOperations.stop(pauseMonitor);
     if (gcTimeMonitor != null) {
@@ -129,7 +131,7 @@ public void testStopBeforeStart() throws Throwable {
       pauseMonitor.init(new Configuration());
       pauseMonitor.stop();
       pauseMonitor.start();
-      Assert.fail("Expected an exception, got " + pauseMonitor);
+      fail("Expected an exception, got " + pauseMonitor);
     } catch (ServiceStateException e) {
       GenericTestUtils.assertExceptionContains("cannot enter state", e);
     }
@@ -141,7 +143,7 @@ public void testStopBeforeInit() throws Throwable {
     try {
       pauseMonitor.stop();
       pauseMonitor.init(new Configuration());
-      Assert.fail("Expected an exception, got " + pauseMonitor);
+      fail("Expected an exception, got " + pauseMonitor);
     } catch (ServiceStateException e) {
       GenericTestUtils.assertExceptionContains("cannot enter state", e);
     }
@@ -193,10 +195,10 @@ public void alert(GcTimeMonitor.GcData gcData) {
       gcCount = gcData.getAccumulatedGcCount();
     }
-    Assert.assertTrue(maxGcTimePercentage > 0);
-    Assert.assertTrue(gcCount > 0);
-    Assert.assertTrue(alerter.numAlerts > 0);
-    Assert.assertTrue(alerter.maxGcTimePercentage >= alertGcPerc);
+    assertTrue(maxGcTimePercentage > 0);
+    assertTrue(gcCount > 0);
+    assertTrue(alerter.numAlerts > 0);
+    assertTrue(alerter.maxGcTimePercentage >= alertGcPerc);
   }
   @Test
@@ -205,8 +207,8 @@ public void testJvmMetricsSingletonWithSameProcessName() {
         .initSingleton("test", null);
     JvmMetrics jvmMetrics2 = org.apache.hadoop.metrics2.source.JvmMetrics
         .initSingleton("test", null);
-    Assert.assertEquals("initSingleton should return the singleton instance",
-        jvmMetrics1, jvmMetrics2);
+    assertEquals(jvmMetrics1, jvmMetrics2,
+        "initSingleton should return the singleton instance");
   }
   @Test
@@ -217,12 +219,12 @@ public void testJvmMetricsSingletonWithDifferentProcessNames() {
     final String process2Name = "process2";
     JvmMetrics jvmMetrics2 = org.apache.hadoop.metrics2.source.JvmMetrics
         .initSingleton(process2Name, null);
-    Assert.assertEquals("initSingleton should return the singleton instance",
-        jvmMetrics1, jvmMetrics2);
-    Assert.assertEquals("unexpected process name of the singleton instance",
-        process1Name, jvmMetrics1.processName);
-    Assert.assertEquals("unexpected process name of the singleton instance",
-        process1Name, jvmMetrics2.processName);
+    assertEquals(jvmMetrics1, jvmMetrics2,
+        "initSingleton should return the singleton instance");
+    assertEquals(process1Name, jvmMetrics1.processName,
+        "unexpected process name of the singleton instance");
+    assertEquals(process1Name, jvmMetrics2.processName,
+        "unexpected process name of the singleton instance");
   }
   /**
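(Note: the following sketch is not part of the patch; it illustrates how the class-wide JUnit 4 Timeout rule and the per-test timeout attribute, both removed in the TestJvmMetrics and TestStatsDMetrics hunks above, map onto JUnit 5's @Timeout annotation. @Timeout defaults to seconds unless a TimeUnit is given; the class name and timings here are invented for the example.)

    import java.util.concurrent.TimeUnit;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    // Illustrative only: class-level @Timeout applies to every test method,
    // mirroring the old JUnit 4 @Rule Timeout; a method-level @Timeout overrides it.
    @Timeout(30)                        // 30 seconds for all tests in the class
    public class TimeoutMigrationExample {

      @Test
      public void usesClassTimeout() {
        // runs under the 30-second class-wide limit
      }

      @Test
      @Timeout(value = 3)               // replaces @Test(timeout = 3000)
      public void usesMethodTimeout() {
        // runs under a 3-second limit for this method only
      }

      @Test
      @Timeout(value = 500, unit = TimeUnit.MILLISECONDS)
      public void usesExplicitUnit() {
        // unit can be stated explicitly when seconds are too coarse
      }
    }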
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMBeans.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMBeans.java
index 3c93dbee06953..fcd2e9aba7359 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMBeans.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMBeans.java
@@ -17,8 +17,7 @@ package org.apache.hadoop.metrics2.util;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
@@ -26,6 +25,8 @@ import java.util.HashMap;
 import java.util.Map;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 /**
  * Test MXBean addition of key/value pairs to registered MBeans.
  */
@@ -46,7 +47,7 @@ public void testRegister() throws Exception {
       int jmxCounter = (int) platformMBeanServer
           .getAttribute(objectName, "Counter");
-      Assert.assertEquals(counter, jmxCounter);
+      assertEquals(counter, jmxCounter);
     } finally {
       if (objectName != null) {
         MBeans.unregister(objectName);
@@ -70,7 +71,7 @@ public void testRegisterWithAdditionalProperties() throws Exception {
           ManagementFactory.getPlatformMBeanServer();
       int jmxCounter = (int) platformMBeanServer.getAttribute(objectName,
          "Counter");
-      Assert.assertEquals(counter, jmxCounter);
+      assertEquals(counter, jmxCounter);
     } finally {
       if (objectName != null) {
         MBeans.unregister(objectName);
@@ -85,7 +86,7 @@ public void testGetMbeanNameName() {
     ObjectName mBeanName = MBeans.getMBeanName("Service",
         "Name", properties);
-    Assert.assertEquals("Service",
+    assertEquals("Service",
         MBeans.getMbeanNameService(mBeanName));
     properties.put("key", "value");
@@ -94,7 +95,7 @@ public void testGetMbeanNameName() {
         "Name", properties);
-    Assert.assertEquals("Service",
+    assertEquals("Service",
         MBeans.getMbeanNameService(mBeanName));
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
index e69947ecdc233..2d059b2386e42 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java
@@ -21,9 +21,14 @@ import java.util.Arrays;
 import java.util.Collection;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
@@ -48,15 +53,15 @@ public class TestMetricsCache {
     verify(mr).name();
     verify(mr).tags();
     verify(mr).metrics();
-    assertEquals("same record size", cr.metrics().size(),
-        ((Collection)mr.metrics()).size());
-    assertEquals("same metric value", 0, cr.getMetric("m"));
+    assertEquals(cr.metrics().size(), ((Collection) mr.metrics()).size(),
+        "same record size");
+    assertEquals(0, cr.getMetric("m"), "same metric value");
     MetricsRecord mr2 = makeRecord("r",
         Arrays.asList(makeTag("t", "tv")),
         Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
     cr = cache.update(mr2);
-    assertEquals("contains 3 metric", 3, cr.metrics().size());
+    assertEquals(3, cr.metrics().size(), "contains 3 metric");
     checkMetricValue("updated metric value", cr, "m", 2);
     checkMetricValue("old metric value", cr, "m1", 1);
     checkMetricValue("new metric value", cr, "m2", 42);
@@ -65,21 +70,21 @@ public class TestMetricsCache {
         Arrays.asList(makeTag("t", "tv3")), // different tag value
         Arrays.asList(makeMetric("m3", 3)));
     cr = cache.update(mr3); // should get a new record
-    assertEquals("contains 1 metric", 1, cr.metrics().size());
+    assertEquals(1, cr.metrics().size(), "contains 1 metric");
     checkMetricValue("updated metric value", cr, "m3", 3);
     // tags cache should be empty so far
-    assertEquals("no tags", 0, cr.tags().size());
+    assertEquals(0, cr.tags().size(), "no tags");
     // until now
     cr = cache.update(mr3, true);
-    assertEquals("Got 1 tag", 1, cr.tags().size());
-    assertEquals("Tag value", "tv3", cr.getTag("t"));
+    assertEquals(1, cr.tags().size(), "Got 1 tag");
+    assertEquals("tv3", cr.getTag("t"), "Tag value");
     checkMetricValue("Metric value", cr, "m3", 3);
   }
   @SuppressWarnings("deprecation")
   @Test public void testGet() {
     MetricsCache cache = new MetricsCache();
-    assertNull("empty", cache.get("r", Arrays.asList(makeTag("t", "t"))));
+    assertNull(cache.get("r", Arrays.asList(makeTag("t", "t"))), "empty");
     MetricsRecord mr = makeRecord("r",
         Arrays.asList(makeTag("t", "t")),
         Arrays.asList(makeMetric("m", 1)));
@@ -87,8 +92,8 @@ public class TestMetricsCache {
     MetricsCache.Record cr = cache.get("r", mr.tags());
     LOG.debug("tags="+ mr.tags() +" cr="+ cr);
-    assertNotNull("Got record", cr);
-    assertEquals("contains 1 metric", 1, cr.metrics().size());
+    assertNotNull(cr, "Got record");
+    assertEquals(1, cr.metrics().size(), "contains 1 metric");
     checkMetricValue("new metric value", cr, "m", 1);
   }
@@ -102,7 +107,7 @@ public class TestMetricsCache {
         Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));
     MetricsCache.Record cr = cache.update(mr);
-    assertTrue("t value should be null", null == cr.getTag("t"));
+    assertTrue(null == cr.getTag("t"), "t value should be null");
   }
   @Test public void testOverflow() {
@@ -115,17 +120,17 @@ public class TestMetricsCache {
           Arrays.asList(makeMetric("m", i))));
       checkMetricValue("new metric value", cr, "m", i);
       if (i < MetricsCache.MAX_RECS_PER_NAME_DEFAULT) {
-        assertNotNull("t0 is still there", cache.get("r", t0));
+        assertNotNull(cache.get("r", t0), "t0 is still there");
      }
     }
-    assertNull("t0 is gone", cache.get("r", t0));
+    assertNull(cache.get("r", t0), "t0 is gone");
   }
   private void checkMetricValue(String description, MetricsCache.Record cr,
       String key, Number val) {
-    assertEquals(description, val, cr.getMetric(key));
-    assertNotNull("metric not null", cr.getMetricInstance(key));
-    assertEquals(description, val, cr.getMetricInstance(key).value());
+    assertEquals(val, cr.getMetric(key), description);
+    assertNotNull(cr.getMetricInstance(key), "metric not null");
+    assertEquals(val, cr.getMetricInstance(key).value(), description);
   }
   private MetricsRecord makeRecord(String name, Collection tags,
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
index aefd7a264b05d..dc361c9a2c1c3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
@@ -25,8 +25,8 @@ import java.util.Random;
 import org.apache.hadoop.metrics2.lib.MutableInverseQuantiles;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import static org.assertj.core.api.Assertions.assertThat;
@@ -39,7 +39,7 @@ public class TestSampleQuantiles {
   SampleQuantiles estimator;
   final static int NUM_REPEATS = 10;
-  @Before
+  @BeforeEach
   public void init() {
     estimator = new SampleQuantiles(quantiles);
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
index 0fb0ad8ace959..04f8470de59c1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleStat.java
@@ -18,8 +18,8 @@ package org.apache.hadoop.metrics2.util;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 /**
  * Test the running sample stat computation
  */
@@ -32,36 +32,36 @@ public class TestSampleStat {
    */
   @Test public void testSimple() {
     SampleStat stat = new SampleStat();
-    assertEquals("num samples", 0, stat.numSamples());
-    assertEquals("mean", 0.0, stat.mean(), EPSILON);
-    assertEquals("variance", 0.0, stat.variance(), EPSILON);
-    assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
-    assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
-    assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
+    assertEquals(0, stat.numSamples(), "num samples");
+    assertEquals(0.0, stat.mean(), EPSILON, "mean");
+    assertEquals(0.0, stat.variance(), EPSILON, "variance");
+    assertEquals(0.0, stat.stddev(), EPSILON, "stddev");
+    assertEquals(SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON, "min");
+    assertEquals(SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON, "max");
     stat.add(3);
-    assertEquals("num samples", 1L, stat.numSamples());
-    assertEquals("mean", 3.0, stat.mean(), EPSILON);
-    assertEquals("variance", 0.0, stat.variance(), EPSILON);
-    assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
-    assertEquals("min", 3.0, stat.min(), EPSILON);
-    assertEquals("max", 3.0, stat.max(), EPSILON);
+    assertEquals(1L, stat.numSamples(), "num samples");
+    assertEquals(3.0, stat.mean(), EPSILON, "mean");
+    assertEquals(0.0, stat.variance(), EPSILON, "variance");
+    assertEquals(0.0, stat.stddev(), EPSILON, "stddev");
+    assertEquals(3.0, stat.min(), EPSILON, "min");
+    assertEquals(3.0, stat.max(), EPSILON, "max");
     stat.add(2).add(1);
-    assertEquals("num samples", 3L, stat.numSamples());
-    assertEquals("mean", 2.0, stat.mean(), EPSILON);
-    assertEquals("variance", 1.0, stat.variance(), EPSILON);
-    assertEquals("stddev", 1.0, stat.stddev(), EPSILON);
-    assertEquals("min", 1.0, stat.min(), EPSILON);
-    assertEquals("max", 3.0, stat.max(), EPSILON);
+    assertEquals(3L, stat.numSamples(), "num samples");
+    assertEquals(2.0, stat.mean(), EPSILON, "mean");
+    assertEquals(1.0, stat.variance(), EPSILON, "variance");
+    assertEquals(1.0, stat.stddev(), EPSILON, "stddev");
+    assertEquals(1.0, stat.min(), EPSILON, "min");
+    assertEquals(3.0, stat.max(), EPSILON, "max");
     stat.reset();
-    assertEquals("num samples", 0, stat.numSamples());
-    assertEquals("mean", 0.0, stat.mean(), EPSILON);
-    assertEquals("variance", 0.0, stat.variance(), EPSILON);
-    assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
-    assertEquals("min", SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON);
-    assertEquals("max", SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON);
+    assertEquals(0, stat.numSamples(), "num samples");
+    assertEquals(0.0, stat.mean(), EPSILON, "mean");
+    assertEquals(0.0, stat.variance(), EPSILON, "variance");
+    assertEquals(0.0, stat.stddev(), EPSILON, "stddev");
+    assertEquals(SampleStat.MinMax.DEFAULT_MIN_VALUE, stat.min(), EPSILON, "min");
+    assertEquals(SampleStat.MinMax.DEFAULT_MAX_VALUE, stat.max(), EPSILON, "max");
   }
 }
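(Note: the following sketch is not part of the patch; it shows the JUnit 5 overload relied on in the TestSampleStat hunks above, assertEquals(expected, actual, delta, message), where the delta bounds the floating-point comparison and the message stays in the trailing position. The class, values, and epsilon here are invented for the example.)

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.junit.jupiter.api.Test;

    // Illustrative only: floating-point assertEquals with an explicit delta and a
    // trailing message, mirroring the converted TestSampleStat assertions.
    public class FloatingPointAssertionExample {
      private static final double EPSILON = 1e-9;

      @Test
      public void meanOfThreeSamples() {
        double mean = (1.0 + 2.0 + 3.0) / 3;
        assertEquals(2.0, mean, EPSILON, "mean");
      }
    }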