MAPREDUCE-7418. Upgrade Junit 4 to 5 in hadoop-mapreduce-client-app.
slfan1989 authored and fanshilun committed Feb 4, 2025
1 parent 950b3eb commit 4d18776
Showing 51 changed files with 1,879 additions and 1,928 deletions.
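
Most of the 51 files follow the same mechanical pattern: the JUnit 4 imports from org.junit are replaced by their Jupiter equivalents in org.junit.jupiter.api, and the lifecycle annotations are renamed. As a rough guide to the hunks below, here is a minimal sketch of that mapping; the class and method names are illustrative and do not come from the patch.

import org.junit.jupiter.api.AfterAll;    // was: org.junit.AfterClass
import org.junit.jupiter.api.AfterEach;   // was: org.junit.After
import org.junit.jupiter.api.BeforeAll;   // was: org.junit.BeforeClass
import org.junit.jupiter.api.BeforeEach;  // was: org.junit.Before
import org.junit.jupiter.api.Test;        // was: org.junit.Test

public class LifecycleMappingSketch {

  @BeforeAll                               // was: @BeforeClass (static, as before)
  public static void setUpClass() { }

  @BeforeEach                              // was: @Before
  public void setUp() { }

  @Test
  public void testSomething() { }

  @AfterEach                               // was: @After
  public void tearDown() { }

  @AfterAll                                // was: @AfterClass (static, as before)
  public static void tearDownClass() { }
}
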
@@ -53,11 +53,11 @@
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -75,7 +75,7 @@ private static void delete(File dir) throws IOException {
fs.delete(p, true);
}

@BeforeClass
@BeforeAll
public static void setupTestDirs() throws IOException {
testWorkDir = new File("target",
TestLocalContainerLauncher.class.getCanonicalName());
@@ -89,29 +89,26 @@ public static void setupTestDirs() throws IOException {
}
}

@AfterClass
@AfterAll
public static void cleanupTestDirs() throws IOException {
if (testWorkDir != null) {
delete(testWorkDir);
}
}

@SuppressWarnings("rawtypes")
@Test(timeout=10000)
@Test
@Timeout(value = 10)
public void testKillJob() throws Exception {
JobConf conf = new JobConf();
AppContext context = mock(AppContext.class);
// a simple event handler solely to detect the container cleaned event
final CountDownLatch isDone = new CountDownLatch(1);
EventHandler<Event> handler = new EventHandler<Event>() {
@Override
public void handle(Event event) {
LOG.info("handling event " + event.getClass() +
" with type " + event.getType());
if (event instanceof TaskAttemptEvent) {
if (event.getType() == TaskAttemptEventType.TA_CONTAINER_CLEANED) {
isDone.countDown();
}
EventHandler<Event> handler = event -> {
LOG.info("handling event {} with type {}.", event.getClass(), event.getType());
if (event instanceof TaskAttemptEvent) {
if (event.getType() == TaskAttemptEventType.TA_CONTAINER_CLEANED) {
isDone.countDown();
}
}
};
@@ -132,7 +129,7 @@ public void handle(Event event) {
Job job = mock(Job.class);
when(job.getTotalMaps()).thenReturn(1);
when(job.getTotalReduces()).thenReturn(0);
Map<JobId,Job> jobs = new HashMap<JobId,Job>();
Map<JobId,Job> jobs = new HashMap<>();
jobs.put(jobId, job);
// app context returns the one and only job
when(context.getAllJobs()).thenReturn(jobs);
@@ -149,14 +146,11 @@ public void handle(Event event) {
TaskAttemptID taskID = TypeConverter.fromYarn(taId);
when(mapTask.getTaskID()).thenReturn(taskID);
when(mapTask.getJobID()).thenReturn(taskID.getJobID());
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
// sleep for a long time
LOG.info("sleeping for 5 minutes...");
Thread.sleep(5*60*1000);
return null;
}
doAnswer((Answer<Void>) invocation -> {
// sleep for a long time
LOG.info("sleeping for 5 minutes...");
Thread.sleep(5 * 60 * 1000);
return null;
}).when(mapTask).run(isA(JobConf.class), isA(TaskUmbilicalProtocol.class));

// pump in a task attempt launch event
@@ -168,7 +162,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable {
// now pump in a container clean-up event
ContainerLauncherEvent cleanupEvent =
new ContainerLauncherEvent(taId, null, null, null,
ContainerLauncher.EventType.CONTAINER_REMOTE_CLEANUP);
ContainerLauncher.EventType.CONTAINER_REMOTE_CLEANUP);
launcher.handle(cleanupEvent);

// wait for the event to fire: this should be received promptly
@@ -194,12 +188,12 @@ public void testRenameMapOutputForReduce() throws Exception {

// make sure both dirs are distinct
//
conf.set(MRConfig.LOCAL_DIR, localDirs[0].toString());
conf.set(MRConfig.LOCAL_DIR, localDirs[0]);
final Path mapOut = mrOutputFiles.getOutputFileForWrite(1);
conf.set(MRConfig.LOCAL_DIR, localDirs[1].toString());
conf.set(MRConfig.LOCAL_DIR, localDirs[1]);
final Path mapOutIdx = mrOutputFiles.getOutputIndexFileForWrite(1);
Assert.assertNotEquals("Paths must be different!",
mapOut.getParent(), mapOutIdx.getParent());
Assertions.assertNotEquals(
mapOut.getParent(), mapOutIdx.getParent(), "Paths must be different!");

// make both dirs part of LOCAL_DIR
conf.setStrings(MRConfig.LOCAL_DIR, localDirs);
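
One behavioral detail in the assertion changes above: JUnit 4's Assert methods take the failure message as the first argument, while JUnit 5's Assertions take it as the last. A small sketch of the reordering, using made-up values rather than the test's real output paths:

import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class AssertionMessageOrderSketch {

  public static void main(String[] args) {
    String mapOutDir = "local-dir-0/output";
    String mapOutIdxDir = "local-dir-1/output";

    // JUnit 4: Assert.assertNotEquals("Paths must be different!", mapOutDir, mapOutIdxDir);
    // JUnit 5 moves the message to the final parameter:
    assertNotEquals(mapOutDir, mapOutIdxDir, "Paths must be different!");

    // The same reordering applies to the boolean assertions:
    assertTrue(mapOutDir.endsWith("output"), "Expected an output directory path.");
  }
}
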
@@ -37,15 +37,15 @@
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.SystemClock;

import org.junit.Test;
import static org.junit.Assert.assertTrue;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class TestTaskAttemptFinishingMonitor {

@Test
public void testFinshingAttemptTimeout()
public void testFinishingAttemptTimeout()
throws IOException, InterruptedException {
SystemClock clock = SystemClock.getInstance();
Configuration conf = new Configuration();
@@ -87,7 +87,7 @@ public void testFinshingAttemptTimeout()
}
taskAttemptFinishingMonitor.stop();

assertTrue("Finishing attempt didn't time out.", eventHandler.timedOut);
assertTrue(eventHandler.timedOut, "Finishing attempt didn't time out.");

}

@@ -103,6 +103,6 @@ public void handle(Event event) {
}
}
}
};
}

}
@@ -23,10 +23,10 @@
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

import org.junit.After;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
@@ -69,12 +69,12 @@
import org.apache.hadoop.yarn.util.SystemClock;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
@@ -172,21 +172,22 @@ protected void stopRpcServer() {
}
}

@After
@AfterEach
public void after() throws IOException {
if (listener != null) {
listener.close();
listener = null;
}
}

@Test (timeout=5000)
@Test
@Timeout(value = 5)
public void testGetTask() throws IOException {
configureMocks();
startListener(false);

// Verify ask before registration.
//The JVM ID has not been registered yet so we should kill it.
//The JVM ID has not been registered yet, so we should kill it.
JvmContext context = new JvmContext();

context.jvmId = id;
@@ -220,7 +221,7 @@ public void testGetTask() throws IOException {

listener.unregister(attemptId, wid);

// Verify after unregistration.
// Verify after un-registration.
result = listener.getTask(context);
assertNotNull(result);
assertTrue(result.shouldDie);
@@ -238,16 +239,18 @@

}

@Test (timeout=5000)
@Test
@Timeout(value = 5)
public void testJVMId() {

JVMId jvmid = new JVMId("test", 1, true, 2);
JVMId jvmid1 = JVMId.forName("jvm_test_0001_m_000002");
// test compare methot should be the same
// test compare method should be the same
assertEquals(0, jvmid.compareTo(jvmid1));
}

@Test (timeout=10000)
@Test
@Timeout(value = 10)
public void testGetMapCompletionEvents() throws IOException {
TaskAttemptCompletionEvent[] empty = {};
TaskAttemptCompletionEvent[] taskEvents = {
@@ -312,7 +315,8 @@ private static TaskAttemptCompletionEvent createTce(int eventId,
return tce;
}

@Test (timeout=10000)
@Test
@Timeout(value = 10)
public void testCommitWindow() throws IOException {
SystemClock clock = SystemClock.getInstance();

@@ -373,7 +377,7 @@ protected void registerHeartbeatHandler(Configuration conf) {

TaskAttemptID tid = new TaskAttemptID("12345", 1, TaskType.REDUCE, 1, 0);

List<Path> partialOut = new ArrayList<Path>();
List<Path> partialOut = new ArrayList<>();
partialOut.add(new Path("/prev1"));
partialOut.add(new Path("/prev2"));

@@ -551,16 +555,13 @@ protected void stopRpcServer() {
long unregisterTimeout = conf.getLong(MRJobConfig.TASK_EXIT_TIMEOUT,
MRJobConfig.TASK_EXIT_TIMEOUT_DEFAULT);
clock.setTime(unregisterTimeout + 1);
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
try {
AMFeedback response =
tal.statusUpdate(attemptID, firstReduceStatus);
return !response.getTaskFound();
} catch (Exception e) {
throw new RuntimeException("status update failed", e);
}
GenericTestUtils.waitFor(() -> {
try {
AMFeedback response =
tal.statusUpdate(attemptID, firstReduceStatus);
return !response.getTaskFound();
} catch (Exception e) {
throw new RuntimeException("status update failed", e);
}
}, 10, 10000);
}
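
Beyond the annotation swap, several hunks in this file replace anonymous inner classes (the Mockito Answer and the Supplier passed to GenericTestUtils.waitFor) with lambdas, which is why the org.mockito.invocation.InvocationOnMock and java.util.function.Supplier imports disappear. A minimal sketch of the Answer conversion, using an illustrative Runnable mock rather than the test's MapTask:

import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.mockito.stubbing.Answer;

public class AnswerLambdaSketch {

  public static void main(String[] args) {
    Runnable task = mock(Runnable.class);

    // Pre-migration style: an anonymous Answer<Void>, which is what forced the
    // explicit org.mockito.invocation.InvocationOnMock import.
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(org.mockito.invocation.InvocationOnMock invocation) {
        System.out.println("mocked run() invoked");
        return null;
      }
    }).when(task).run();

    // Post-migration style: a lambda cast to Answer<Void>; the cast gives the
    // lambda its target type, and InvocationOnMock no longer needs an import.
    doAnswer((Answer<Void>) invocation -> {
      System.out.println("mocked run() invoked");
      return null;
    }).when(task).run();

    task.run();  // runs the most recent stubbing, printing once
  }
}
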
@@ -21,8 +21,8 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ClusterStorageCapacityExceededException;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.mockito.Mockito.*;

@@ -36,7 +36,7 @@ public class TestYarnChild {
final static private String KILL_LIMIT_EXCEED_CONF_NAME =
"mapreduce.job.dfs.storage.capacity.kill-limit-exceed";

@Before
@BeforeEach
public void setUp() throws Exception {
task = mock(Task.class);
umbilical = mock(TaskUmbilicalProtocol.class);
@@ -19,8 +19,8 @@
package org.apache.hadoop.mapreduce.jobhistory;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -40,7 +40,8 @@
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TestEvents {

@@ -50,7 +51,8 @@ public class TestEvents {
*
* @throws Exception
*/
@Test(timeout = 10000)
@Test
@Timeout(value = 10)
public void testTaskAttemptFinishedEvent() throws Exception {

JobID jid = new JobID("001", 1);
@@ -79,7 +81,8 @@ public void testTaskAttemptFinishedEvent() throws Exception {
* @throws Exception
*/

@Test(timeout = 10000)
@Test
@Timeout(value = 10)
public void testJobPriorityChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
@@ -89,7 +92,8 @@ public void testJobPriorityChange() throws Exception {

}

@Test(timeout = 10000)
@Test
@Timeout(value = 10)
public void testJobQueueChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobQueueChangeEvent test = new JobQueueChangeEvent(jid,
@@ -103,7 +107,8 @@ public void testJobQueueChange() throws Exception {
*
* @throws Exception
*/
@Test(timeout = 10000)
@Test
@Timeout(value = 10)
public void testTaskUpdated() throws Exception {
JobID jid = new JobID("001", 1);
TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
@@ -118,7 +123,8 @@ public void testTaskUpdated() throws Exception {
* instance of HistoryEvent Different HistoryEvent should have a different
* datum.
*/
@Test(timeout = 10000)
@Test
@Timeout(value = 10)
public void testEvents() throws Exception {

EventReader reader = new EventReader(new DataInputStream(
Expand Down