MAPREDUCE-7414. [JDK17] Upgrade Junit 4 to 5 in hadoop-mapreduce-client-hs. #7354

Merged 5 commits on Feb 6, 2025
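The diff below applies the usual JUnit 4 to JUnit Jupiter migration to the history-server tests. As a minimal sketch of the lifecycle-annotation mapping the changes rely on (not part of the PR; class and method names here are illustrative only):

// Illustration only: JUnit 4 lifecycle annotations and their Jupiter equivalents,
// as used throughout the diff below.
import org.junit.jupiter.api.AfterAll;    // replaces org.junit.AfterClass
import org.junit.jupiter.api.AfterEach;   // replaces org.junit.After
import org.junit.jupiter.api.BeforeAll;   // replaces org.junit.BeforeClass
import org.junit.jupiter.api.Test;        // replaces org.junit.Test

class LifecycleSketch {

  @BeforeAll
  static void setUpClass() { }   // was @BeforeClass

  @AfterAll
  static void cleanUpClass() { } // was @AfterClass

  @AfterEach
  void cleanTest() { }           // was @After

  @Test
  void example() { }
}

The same mapping drives the import changes visible in both files below.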
TestCompletedTask.java
@@ -30,14 +30,18 @@
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.hs.CompletedTask;
import org.junit.Test;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TestCompletedTask{
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@Test (timeout=5000)
public class TestCompletedTask {

@Test
@Timeout(value = 5)
public void testTaskStartTimes() {

TaskId taskId = mock(TaskId.class);
@@ -48,13 +52,13 @@ public void testTaskStartTimes() {
TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
TaskAttemptInfo info = mock(TaskAttemptInfo.class);
when(info.getAttemptId()).thenReturn(id);
when(info.getStartTime()).thenReturn(10l);
when(info.getStartTime()).thenReturn(10L);
taskAttempts.put(id, info);

id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
info = mock(TaskAttemptInfo.class);
when(info.getAttemptId()).thenReturn(id);
when(info.getStartTime()).thenReturn(20l);
when(info.getStartTime()).thenReturn(20L);
taskAttempts.put(id, info);


@@ -69,7 +73,8 @@ public void testTaskStartTimes() {
/**
* test some methods of CompletedTaskAttempt
*/
@Test (timeout=5000)
@Test
@Timeout(value = 5)
public void testCompletedTaskAttempt(){

TaskAttemptInfo attemptInfo= mock(TaskAttemptInfo.class);
@@ -82,14 +87,14 @@ public void testCompletedTaskAttempt(){
TaskID taskId =new TaskID(jobId,TaskType.REDUCE, 0);
TaskAttemptID taskAttemptId= new TaskAttemptID(taskId, 0);
when(attemptInfo.getAttemptId()).thenReturn(taskAttemptId);


CompletedTaskAttempt taskAttemt= new CompletedTaskAttempt(null,attemptInfo);
assertEquals( "Rackname", taskAttemt.getNodeRackName());
assertEquals( Phase.CLEANUP, taskAttemt.getPhase());
assertTrue( taskAttemt.isFinished());
assertEquals( 11L, taskAttemt.getShuffleFinishTime());
assertEquals( 12L, taskAttemt.getSortFinishTime());
assertEquals( 10, taskAttemt.getShufflePort());
assertEquals("Rackname", taskAttemt.getNodeRackName());
assertEquals(Phase.CLEANUP, taskAttemt.getPhase());
assertTrue(taskAttemt.isFinished());
assertEquals(11L, taskAttemt.getShuffleFinishTime());
assertEquals(12L, taskAttemt.getSortFinishTime());
assertEquals(10, taskAttemt.getShufflePort());
}
}
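One subtlety in the conversion above: the JUnit 4 timeout attribute is expressed in milliseconds, while Jupiter's @Timeout defaults to seconds, which is why @Test (timeout=5000) becomes @Test plus @Timeout(value = 5). A hedged sketch of the equivalence (class and method names are illustrative, not from the PR):

import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutMigrationSketch {

  // Translation used in the diff above: 5000 ms under JUnit 4 becomes
  // 5 seconds under Jupiter, because @Timeout defaults to TimeUnit.SECONDS.
  @Test
  @Timeout(value = 5)
  void defaultUnitIsSeconds() { }

  // Equivalent form with the unit spelled out, if the millisecond value is kept.
  @Test
  @Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
  void explicitMilliseconds() { }
}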
TestHistoryFileManager.java
@@ -27,7 +27,6 @@

import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
@@ -47,27 +46,30 @@
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

import static org.mockito.Mockito.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class TestHistoryFileManager {
private static MiniDFSCluster dfsCluster = null;
private static MiniDFSCluster dfsCluster2 = null;
private static String coreSitePath;

@Rule
public TestName name = new TestName();

@BeforeClass
@BeforeAll
public static void setUpClass() throws Exception {
coreSitePath = "." + File.separator + "target" + File.separator +
"test-classes" + File.separator + "core-site.xml";
"test-classes" + File.separator + "core-site.xml";
Configuration conf = new HdfsConfiguration();
Configuration conf2 = new HdfsConfiguration();
dfsCluster = new MiniDFSCluster.Builder(conf).build();
@@ -77,13 +79,13 @@ public static void setUpClass() throws Exception {
dfsCluster2 = new MiniDFSCluster.Builder(conf2).build();
}

@AfterClass
@AfterAll
public static void cleanUpClass() throws Exception {
dfsCluster.shutdown();
dfsCluster2.shutdown();
}

@After
@AfterEach
public void cleanTest() throws Exception {
new File(coreSitePath).delete();
dfsCluster.getFileSystem().setSafeMode(
@@ -92,46 +94,48 @@ public void cleanTest() throws Exception {
SafeModeAction.LEAVE);
}

private String getDoneDirNameForTest() {
return "/" + name.getMethodName();
private String getDoneDirNameForTest(String name) {
return "/" + name;
}

private String getIntermediateDoneDirNameForTest() {
return "/intermediate_" + name.getMethodName();
private String getIntermediateDoneDirNameForTest(String name) {
return "/intermediate_" + name;
}

private void testTryCreateHistoryDirs(Configuration conf, boolean expected)
private void testTryCreateHistoryDirs(Configuration conf, boolean expected, String methodName)
throws Exception {
conf.set(JHAdminConfig.MR_HISTORY_DONE_DIR, getDoneDirNameForTest());
conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR, getIntermediateDoneDirNameForTest());
conf.set(JHAdminConfig.MR_HISTORY_DONE_DIR, getDoneDirNameForTest(methodName));
conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR,
getIntermediateDoneDirNameForTest(methodName));
HistoryFileManager hfm = new HistoryFileManager();
hfm.conf = conf;
Assert.assertEquals(expected, hfm.tryCreatingHistoryDirs(false));
assertEquals(expected, hfm.tryCreatingHistoryDirs(false));
}

@Test
public void testCreateDirsWithoutFileSystem() throws Exception {
public void testCreateDirsWithoutFileSystem(TestInfo testInfo) throws Exception {
Configuration conf = new YarnConfiguration();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "hdfs://localhost:1");
testTryCreateHistoryDirs(conf, false);
testTryCreateHistoryDirs(conf, false, testInfo.getDisplayName());
}

@Test
public void testCreateDirsWithFileSystem() throws Exception {
public void testCreateDirsWithFileSystem(TestInfo testInfo) throws Exception {
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.LEAVE);
Assert.assertFalse(dfsCluster.getFileSystem().isInSafeMode());
testTryCreateHistoryDirs(dfsCluster.getConfiguration(0), true);
assertFalse(dfsCluster.getFileSystem().isInSafeMode());
testTryCreateHistoryDirs(dfsCluster.getConfiguration(0), true,
testInfo.getDisplayName());
}

@Test
public void testCreateDirsWithAdditionalFileSystem() throws Exception {
public void testCreateDirsWithAdditionalFileSystem(TestInfo testInfo) throws Exception {
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.LEAVE);
dfsCluster2.getFileSystem().setSafeMode(
SafeModeAction.LEAVE);
Assert.assertFalse(dfsCluster.getFileSystem().isInSafeMode());
Assert.assertFalse(dfsCluster2.getFileSystem().isInSafeMode());
assertFalse(dfsCluster.getFileSystem().isInSafeMode());
assertFalse(dfsCluster2.getFileSystem().isInSafeMode());

// Set default configuration to the first cluster
Configuration conf = new Configuration(false);
@@ -141,25 +145,28 @@ public void testCreateDirsWithAdditionalFileSystem() throws Exception {
conf.writeXml(os);
os.close();

testTryCreateHistoryDirs(dfsCluster2.getConfiguration(0), true);
testTryCreateHistoryDirs(dfsCluster2.getConfiguration(0), true,
testInfo.getDisplayName());

// Directories should be created only in the default file system (dfsCluster)
Assert.assertTrue(dfsCluster.getFileSystem()
.exists(new Path(getDoneDirNameForTest())));
Assert.assertTrue(dfsCluster.getFileSystem()
.exists(new Path(getIntermediateDoneDirNameForTest())));
Assert.assertFalse(dfsCluster2.getFileSystem()
.exists(new Path(getDoneDirNameForTest())));
Assert.assertFalse(dfsCluster2.getFileSystem()
.exists(new Path(getIntermediateDoneDirNameForTest())));
String displayName = testInfo.getDisplayName();
assertTrue(dfsCluster.getFileSystem().
exists(new Path(getDoneDirNameForTest(displayName))));
assertTrue(dfsCluster.getFileSystem().
exists(new Path(getIntermediateDoneDirNameForTest(displayName))));
assertFalse(dfsCluster2.getFileSystem().
exists(new Path(getDoneDirNameForTest(displayName))));
assertFalse(dfsCluster2.getFileSystem().
exists(new Path(getIntermediateDoneDirNameForTest(displayName))));
}

@Test
public void testCreateDirsWithFileSystemInSafeMode() throws Exception {
public void testCreateDirsWithFileSystemInSafeMode(TestInfo testInfo) throws Exception {
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.ENTER);
Assert.assertTrue(dfsCluster.getFileSystem().isInSafeMode());
testTryCreateHistoryDirs(dfsCluster.getConfiguration(0), false);
assertTrue(dfsCluster.getFileSystem().isInSafeMode());
testTryCreateHistoryDirs(dfsCluster.getConfiguration(0), false,
testInfo.getDisplayName());
}

private void testCreateHistoryDirs(Configuration conf, Clock clock)
@@ -176,30 +183,30 @@ public void testCreateDirsWithFileSystemBecomingAvailBeforeTimeout()
throws Exception {
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.ENTER);
Assert.assertTrue(dfsCluster.getFileSystem().isInSafeMode());
assertTrue(dfsCluster.getFileSystem().isInSafeMode());
new Thread() {
@Override
public void run() {
try {
Thread.sleep(500);
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.LEAVE);
Assert.assertTrue(dfsCluster.getFileSystem().isInSafeMode());
assertTrue(dfsCluster.getFileSystem().isInSafeMode());
} catch (Exception ex) {
Assert.fail(ex.toString());
fail(ex.toString());
}
}
}.start();
testCreateHistoryDirs(dfsCluster.getConfiguration(0),
SystemClock.getInstance());
}

@Test(expected = YarnRuntimeException.class)
@Test
public void testCreateDirsWithFileSystemNotBecomingAvailBeforeTimeout()
throws Exception {
dfsCluster.getFileSystem().setSafeMode(
SafeModeAction.ENTER);
Assert.assertTrue(dfsCluster.getFileSystem().isInSafeMode());
assertTrue(dfsCluster.getFileSystem().isInSafeMode());
final ControlledClock clock = new ControlledClock();
clock.setTime(1);
new Thread() {
@@ -209,11 +216,13 @@ public void run() {
Thread.sleep(500);
clock.setTime(3000);
} catch (Exception ex) {
Assert.fail(ex.toString());
fail(ex.toString());
}
}
}.start();
testCreateHistoryDirs(dfsCluster.getConfiguration(0), clock);
assertThrows(YarnRuntimeException.class, () -> {
testCreateHistoryDirs(dfsCluster.getConfiguration(0), clock);
});
}

@Test
@@ -228,7 +237,7 @@ public void testScanDirectory() throws Exception {

//primarily, succcess is that an exception was not thrown. Also nice to
//check this
Assert.assertNotNull(lfs);
assertNotNull(lfs);

}

@@ -248,7 +257,7 @@ public void testHistoryFileInfoSummaryFileNotExist() throws Exception {
HistoryFileInfo info = hmTest.getHistoryFileInfo(null, null,
summaryFile, jobIndexInfo, false);
info.moveToDone();
Assert.assertFalse(info.didMoveFail());
assertFalse(info.didMoveFail());
}

@Test
@@ -274,8 +283,8 @@ public void testHistoryFileInfoLoadOversizedJobShouldReturnUnParsedJob()
jobIndexInfo, false);

Job job = info.loadJob();
Assert.assertTrue("Should return an instance of UnparsedJob to indicate" +
" the job history file is not parsed", job instanceof UnparsedJob);
assertTrue(job instanceof UnparsedJob, "Should return an instance of UnparsedJob to indicate" +
" the job history file is not parsed");
}

@Test
@@ -307,9 +316,8 @@ public void testHistoryFileInfoLoadNormalSizedJobShouldReturnCompletedJob()
null, jobIndexInfo, false);

Job job = info.loadJob();
Assert.assertTrue("Should return an instance of CompletedJob as " +
"a result of parsing the job history file of the job",
job instanceof CompletedJob);
assertTrue(job instanceof CompletedJob, "Should return an instance of CompletedJob as " +
"a result of parsing the job history file of the job");
}

@Test
@@ -336,10 +344,8 @@ public void testHistoryFileInfoShouldReturnCompletedJobIfMaxNotConfiged()
null, jobIndexInfo, false);

Job job = info.loadJob();
Assert.assertTrue("Should return an instance of CompletedJob as " +
"a result of parsing the job history file of the job",
job instanceof CompletedJob);

assertTrue(job instanceof CompletedJob, "Should return an instance of CompletedJob as " +
"a result of parsing the job history file of the job");
}

/**
@@ -386,10 +392,10 @@ public void testMoveToDoneAlreadyMovedSucceeds() throws Exception {
jobIndexInfo, false);
info.moveToDone();

Assert.assertFalse(info.isMovePending());
Assert.assertEquals(doneHistoryFilePath.toString(),
assertFalse(info.isMovePending());
assertEquals(doneHistoryFilePath.toString(),
info.getHistoryFile().toUri().getPath());
Assert.assertEquals(doneConfFilePath.toString(),
assertEquals(doneConfFilePath.toString(),
info.getConfFile().toUri().getPath());
}

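Beyond the assertion imports, TestHistoryFileManager swaps the JUnit 4 @Rule TestName for an injected TestInfo parameter, replaces @Test(expected = YarnRuntimeException.class) with assertThrows, and moves assertion messages from the first argument (JUnit 4) to the last (Jupiter). A minimal sketch of the first two idioms, assuming illustrative class, method, and exception names that are not part of the PR:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

class MigrationIdiomsSketch {

  // JUnit 4 used "@Rule public TestName name = new TestName()" and
  // name.getMethodName(); Jupiter injects TestInfo into the test method instead.
  @Test
  void buildsPerTestPath(TestInfo testInfo) {
    String doneDir = "/" + testInfo.getDisplayName();
    assertTrue(doneDir.startsWith("/"));
  }

  // JUnit 4 used @Test(expected = SomeException.class); Jupiter wraps the
  // failing call in assertThrows, which also returns the thrown exception.
  @Test
  void translatesExpectedException() {
    IllegalStateException ex = assertThrows(IllegalStateException.class, () -> {
      throw new IllegalStateException("boom");
    });
    assertEquals("boom", ex.getMessage());
  }
}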