diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedChunkedFile.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedChunkedFile.java index b6d0fd2544d63..dd33410dee73a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedChunkedFile.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedChunkedFile.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.mapred; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.RandomAccessFile; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Unit test for FadvisedChunkedFile. 
@@ -42,11 +42,11 @@ public void testDoubleClose() throws Exception { f, 0, 5, 2, true, 10, null, "foo"); - assertTrue("fd not valid", f.getFD().valid()); + assertTrue(f.getFD().valid(), "fd not valid"); af.close(); - assertFalse("fd still valid", f.getFD().valid()); + assertFalse(f.getFD().valid(), "fd still valid"); af.close(); - assertFalse("fd still valid", f.getFD().valid()); + assertFalse(f.getFD().valid(), "fd still valid"); } } finally { absoluteFile.delete(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedFileRegion.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedFileRegion.java index ce0c0d6aeafe9..7e55b64dfaafa 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedFileRegion.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestFadvisedFileRegion.java @@ -28,17 +28,21 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + public class TestFadvisedFileRegion { private final int FILE_SIZE = 16*1024*1024; private static final Logger LOG = LoggerFactory.getLogger(TestFadvisedFileRegion.class); - @Test(timeout = 100000) + @Test + @Timeout(value = 100) public void testCustomShuffleTransfer() throws IOException { File absLogDir = new File("target", TestFadvisedFileRegion.class.getSimpleName() + @@ -84,7 +88,7 @@ public void testCustomShuffleTransfer() 
throws IOException { targetFile = new RandomAccessFile(outFile.getAbsolutePath(), "rw"); target = targetFile.getChannel(); - Assert.assertEquals(FILE_SIZE, inputFile.length()); + assertEquals(FILE_SIZE, inputFile.length()); //create FadvisedFileRegion fileRegion = new FadvisedFileRegion( @@ -100,8 +104,8 @@ public void testCustomShuffleTransfer() throws IOException { } //assert size - Assert.assertEquals(count, (int)pos); - Assert.assertEquals(count, targetFile.length()); + assertEquals(count, (int)pos); + assertEquals(count, targetFile.length()); } finally { if (fileRegion != null) { fileRegion.deallocate(); @@ -117,10 +121,10 @@ public void testCustomShuffleTransfer() throws IOException { try { int total = in.read(buff, 0, count); - Assert.assertEquals(count, total); + assertEquals(count, total); for(int i = 0; i < count; i++) { - Assert.assertEquals(initBuff[position+i], buff[i]); + assertEquals(initBuff[position+i], buff[i]); } } finally { IOUtils.cleanupWithLogger(LOG, in); @@ -137,21 +141,21 @@ private static void customShuffleTransferCornerCases( FadvisedFileRegion fileRegion, WritableByteChannel target, int count) { try { fileRegion.customShuffleTransfer(target, -1); - Assert.fail("Expected a IllegalArgumentException"); + fail("Expected a IllegalArgumentException"); } catch (IllegalArgumentException ie) { LOG.info("Expected - illegal argument is passed."); } catch (Exception e) { - Assert.fail("Expected a IllegalArgumentException"); + fail("Expected a IllegalArgumentException"); } //test corner cases try { fileRegion.customShuffleTransfer(target, count + 1); - Assert.fail("Expected a IllegalArgumentException"); + fail("Expected a IllegalArgumentException"); } catch (IllegalArgumentException ie) { LOG.info("Expected - illegal argument is passed."); } catch (Exception e) { - Assert.fail("Expected a IllegalArgumentException"); + fail("Expected a IllegalArgumentException"); } } } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java index f052e4bc70c34..5b4f0971822cf 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java @@ -80,7 +80,7 @@ import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.Token; import org.eclipse.jetty.http.HttpHeader; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.LoggerFactory; import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; @@ -93,10 +93,10 @@ import static org.apache.hadoop.mapred.ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT; import static org.apache.hadoop.mapred.ShuffleHandler.TIMEOUT_HANDLER; import static org.apache.hadoop.mapreduce.security.SecureShuffleUtils.HTTP_HEADER_URL_HASH; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestShuffleChannelHandler extends TestShuffleHandlerBase { private static final org.slf4j.Logger LOG = @@ -163,12 +163,12 @@ public void tetKeepAliveTimeout() throws InterruptedException, IOException { t.getExpectedHttpResponse(req, true, 46), t.getAttemptData(new Attempt(TEST_ATTEMPT_1, TEST_DATA_A)) ); - 
assertTrue("keep-alive", shuffle.isActive()); + assertTrue(shuffle.isActive(), "keep-alive"); TimeUnit.SECONDS.sleep(3); shuffle.runScheduledPendingTasks(); - assertFalse("closed", shuffle.isActive()); + assertFalse(shuffle.isActive(), "closed"); } @Test @@ -193,7 +193,7 @@ public void testIncompatibleShuffleVersion() { assertEquals(getExpectedHttpResponse(HttpResponseStatus.BAD_REQUEST).toString(), actual.toString()); - assertFalse("closed", shuffle.isActive()); // known-issue + assertFalse(shuffle.isActive(), "closed"); // known-issue } @Test @@ -216,7 +216,7 @@ public void testInvalidMapNoIndexFile() { assertEquals(getExpectedHttpResponse(HttpResponseStatus.INTERNAL_SERVER_ERROR).toString(), actual.toString()); - assertFalse("closed", shuffle.isActive()); + assertFalse(shuffle.isActive(), "closed"); } @Test @@ -225,7 +225,7 @@ public void testInvalidMapNoDataFile() { final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion(); String dataFile = getDataFile(TEST_USER, tempDir.toAbsolutePath().toString(), TEST_ATTEMPT_2); - assertTrue("should delete", new File(dataFile).delete()); + assertTrue(new File(dataFile).delete(), "should delete"); FullHttpRequest req = t.createRequest(getUri(TEST_JOB_ID, 0, Arrays.asList(TEST_ATTEMPT_1, TEST_ATTEMPT_2), false)); @@ -243,7 +243,7 @@ public void testInvalidMapNoDataFile() { assertEquals(getExpectedHttpResponse(HttpResponseStatus.INTERNAL_SERVER_ERROR).toString(), actual.toString()); - assertFalse("closed", shuffle.isActive()); + assertFalse(shuffle.isActive(), "closed"); } private DefaultHttpResponse getExpectedHttpResponse(HttpResponseStatus status) { @@ -362,7 +362,7 @@ private void testGetAllAttemptsForReduce0NoKeepAlive( getExpectedHttpResponse(request, false, 138), getAllAttemptsForReduce0() ); - assertFalse("no keep-alive", shuffle.isActive()); + assertFalse(shuffle.isActive(), "no keep-alive"); } private void testKeepAlive(java.util.Queue messages, @@ -374,7 +374,7 @@ private void 
testKeepAlive(java.util.Queue messages, getExpectedHttpResponse(req1, true, 46), getAttemptData(new Attempt(TEST_ATTEMPT_1, TEST_DATA_A)) ); - assertTrue("keep-alive", shuffle.isActive()); + assertTrue(shuffle.isActive(), "keep-alive"); messages.clear(); final FullHttpRequest req2 = createRequest( @@ -384,7 +384,7 @@ private void testKeepAlive(java.util.Queue messages, getExpectedHttpResponse(req2, true, 46), getAttemptData(new Attempt(TEST_ATTEMPT_2, TEST_DATA_B)) ); - assertTrue("keep-alive", shuffle.isActive()); + assertTrue(shuffle.isActive(), "keep-alive"); messages.clear(); final FullHttpRequest req3 = createRequest( @@ -394,7 +394,7 @@ private void testKeepAlive(java.util.Queue messages, getExpectedHttpResponse(req3, false, 46), getAttemptData(new Attempt(TEST_ATTEMPT_3, TEST_DATA_C)) ); - assertFalse("no keep-alive", shuffle.isActive()); + assertFalse(shuffle.isActive(), "no keep-alive"); } private ArrayList getAllAttemptsForReduce0() throws IOException { @@ -437,15 +437,15 @@ private void assertResponse(java.util.Queue outboundMessages, assertEquals(response.toString(), resp.toString()); } if (i > 0 && i <= content.size()) { - assertEquals("data should match", - ByteBufUtil.prettyHexDump(content.get(i - 1)), actualHexdump); + assertEquals(ByteBufUtil.prettyHexDump(content.get(i - 1)), + actualHexdump, "data should match"); } i++; } // This check is done after to have better debug logs on failure. 
- assertEquals("all data should match", content.size() + 1, outboundMessages.size()); + assertEquals(content.size() + 1, outboundMessages.size(), "all data should match"); } public EmbeddedChannel createShuffleHandlerChannelFileRegion() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java index cc46b49b11334..274d7f3d11304 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java @@ -28,11 +28,11 @@ import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.assertGauge; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -79,7 +79,8 @@ import org.apache.hadoop.yarn.server.api.ApplicationInitializationContext; import org.apache.hadoop.yarn.server.api.ApplicationTerminationContext; import org.apache.hadoop.yarn.server.records.Version; -import org.junit.Test;
+import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -97,7 +98,8 @@ public class TestShuffleHandler extends TestShuffleHandlerBase { * * @throws Exception exception */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testSerializeMeta() throws Exception { assertEquals(1, ShuffleHandler.deserializeMetaData( ShuffleHandler.serializeMetaData(1))); @@ -112,7 +114,8 @@ public void testSerializeMeta() throws Exception { * * @throws Exception exception */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testShuffleMetrics() throws Exception { MetricsSystem ms = new MetricsSystemImpl(); ShuffleHandler sh = new ShuffleHandler(ms); @@ -135,7 +138,7 @@ public void testShuffleMetrics() throws Exception { } static void checkShuffleMetrics(MetricsSystem ms, long bytes, int failed, - int succeeded, int connections) { + int succeeded, int connections) { MetricsSource source = ms.getSource("ShuffleMetrics"); MetricsRecordBuilder rb = getMetrics(source); assertCounter("ShuffleOutputBytes", bytes, rb); @@ -149,7 +152,8 @@ static void checkShuffleMetrics(MetricsSystem ms, long bytes, int failed, * * @throws Exception exception */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testMaxConnections() throws Exception { final int maxAllowedConnections = 3; final int notAcceptedConnections = 1; @@ -195,34 +199,33 @@ public void testMaxConnections() throws Exception { connectionList.add(conn); } - assertEquals(String.format("Expected only %s and %s response", - OK_STATUS, ShuffleHandler.TOO_MANY_REQ_STATUS), + assertEquals( Sets.newHashSet( HttpURLConnection.HTTP_OK, ShuffleHandler.TOO_MANY_REQ_STATUS.code()), - mapOfConnections.keySet()); + mapOfConnections.keySet(), String.format("Expected only %s and %s response", + OK_STATUS, ShuffleHandler.TOO_MANY_REQ_STATUS)); List successfulConnections = mapOfConnections.get(HttpURLConnection.HTTP_OK); - 
assertEquals(String.format("Expected exactly %d requests " + - "with %s response", maxAllowedConnections, OK_STATUS), - maxAllowedConnections, successfulConnections.size()); + assertEquals(maxAllowedConnections, successfulConnections.size(), + String.format("Expected exactly %d requests " + + "with %s response", maxAllowedConnections, OK_STATUS)); //Ensure exactly one connection is HTTP 429 (TOO MANY REQUESTS) List closedConnections = mapOfConnections.get(ShuffleHandler.TOO_MANY_REQ_STATUS.code()); - assertEquals(String.format("Expected exactly %d %s response", - notAcceptedConnections, ShuffleHandler.TOO_MANY_REQ_STATUS), - notAcceptedConnections, closedConnections.size()); + assertEquals(notAcceptedConnections, closedConnections.size(), + String.format("Expected exactly %d %s response", + notAcceptedConnections, ShuffleHandler.TOO_MANY_REQ_STATUS)); // This connection should be closed because it is above the maximum limit HttpURLConnection conn = closedConnections.get(0); - assertEquals(String.format("Expected a %s response", - ShuffleHandler.TOO_MANY_REQ_STATUS), - ShuffleHandler.TOO_MANY_REQ_STATUS.code(), conn.getResponseCode()); + assertEquals(ShuffleHandler.TOO_MANY_REQ_STATUS.code(), conn.getResponseCode(), + String.format("Expected a %s response", ShuffleHandler.TOO_MANY_REQ_STATUS)); long backoff = Long.parseLong( conn.getHeaderField(ShuffleHandler.RETRY_AFTER_HEADER)); - assertTrue("The backoff value cannot be negative.", backoff > 0); + assertTrue(backoff > 0, "The backoff value cannot be negative."); shuffleHandler.stop(); } @@ -232,7 +235,8 @@ public void testMaxConnections() throws Exception { * * @throws Exception exception */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testKeepAlive() throws Exception { Configuration conf = new Configuration(); ShuffleHandlerMock shuffleHandler = new ShuffleHandlerMock(); @@ -262,8 +266,8 @@ public void testKeepAlive() throws Exception { shuffleHandler.stop(); List actual = 
matchLogs("connections=\\d+"); - assertEquals("only one connection was used", - Arrays.asList("connections=1", "connections=0"), actual); + assertEquals(Arrays.asList("connections=1", "connections=0"), actual, + "only one connection was used"); } /** @@ -272,7 +276,8 @@ public void testKeepAlive() throws Exception { * * @throws IOException exception */ - @Test(timeout = 100000) + @Test + @Timeout(value = 100) public void testMapFileAccess() throws IOException { // This will run only in NativeIO is enabled as SecureIOUtils need it assumeTrue(NativeIO.isAvailable()); @@ -326,9 +331,9 @@ public void testMapFileAccess() throws IOException { String message = "Owner '" + owner + "' for path " + indexFilePath + " did not match expected owner '" + randomUser + "'"; - assertTrue(String.format("Received string '%s' should contain " + - "message '%s'", receivedString, message), - receivedString.contains(message)); + assertTrue(receivedString.contains(message), + String.format("Received string '%s' should contain message '%s'", + receivedString, message)); assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, conn.getResponseCode()); LOG.info("received: " + receivedString); assertNotEquals("", receivedString); @@ -454,8 +459,8 @@ public void testRecoveryFromOtherVersions() throws IOException { shuffle.start(); fail("Incompatible version, should expect fail here."); } catch (ServiceStateException e) { - assertTrue("Exception message mismatch", - e.getMessage().contains("Incompatible version for state DB schema:")); + assertTrue(e.getMessage().contains("Incompatible version for state DB schema:"), + "Exception message mismatch"); } } finally { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandlerBase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandlerBase.java index 
406f286623006..db5356daa93ef 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandlerBase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandlerBase.java @@ -41,14 +41,14 @@ import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader; import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache; import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalListener; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import static io.netty.util.ResourceLeakDetector.Level.PARANOID; import static org.apache.hadoop.io.MapFile.DATA_FILE_NAME; import static org.apache.hadoop.io.MapFile.INDEX_FILE_NAME; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestShuffleHandlerBase { public static final String TEST_ATTEMPT_1 = "attempt_1111111111111_0001_m_000001_0"; @@ -65,7 +65,7 @@ public class TestShuffleHandlerBase { @SuppressWarnings("checkstyle:VisibilityModifier") protected java.nio.file.Path tempDir; - @Before + @BeforeEach public void setup() throws IOException { tempDir = Files.createTempDirectory("test-shuffle-channel-handler"); tempDir.toFile().deleteOnExit(); @@ -82,7 +82,7 @@ public void setup() throws IOException { System.setOut(new PrintStream(outputStreamCaptor)); } - @After + @AfterEach public void teardown() { System.setOut(standardOut); System.out.print(outputStreamCaptor); @@ -102,7 +102,7 @@ public List matchLogs(String pattern) { } public static void generateMapOutput(String user, String tempDir, - String attempt, List maps) + String attempt, List maps) throws IOException { SpillRecord 
record = new SpillRecord(maps.size());