Skip to content

Commit b45c09c

Browse files
Authored and committed by fanshilun
MAPREDUCE-7421. [JDK17] Upgrade Junit 4 to 5 in hadoop-mapreduce-client-jobclient Part2.
1 parent 81146fe commit b45c09c

File tree

80 files changed

+1168
-1124
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

80 files changed

+1168
-1124
lines changed

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java

Lines changed: 37 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -30,13 +30,13 @@
3030
import org.apache.hadoop.mapred.HadoopTestCase;
3131
import org.apache.hadoop.mapred.JobConf;
3232
import org.apache.log4j.Level;
33-
import org.junit.Before;
34-
import org.junit.Test;
33+
import org.junit.jupiter.api.BeforeEach;
34+
import org.junit.jupiter.api.Test;
3535

36-
import static org.junit.Assert.assertTrue;
37-
import static org.junit.Assert.assertNotNull;
38-
import static org.junit.Assert.assertEquals;
39-
import static org.junit.Assert.assertFalse;
36+
import static org.junit.jupiter.api.Assertions.assertTrue;
37+
import static org.junit.jupiter.api.Assertions.assertNotNull;
38+
import static org.junit.jupiter.api.Assertions.assertEquals;
39+
import static org.junit.jupiter.api.Assertions.assertFalse;
4040

4141
public class TestChild extends HadoopTestCase {
4242
private static String TEST_ROOT_DIR =
@@ -63,25 +63,25 @@ protected void setup(Context context) throws IOException,
6363
boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
6464
if (oldConfigs) {
6565
String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
66-
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
67-
javaOpts);
68-
assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
69-
javaOpts,
70-
javaOpts, TASK_OPTS_VAL);
66+
assertNotNull(
67+
javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
68+
assertEquals(
69+
javaOpts, TASK_OPTS_VAL, JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
70+
javaOpts);
7171
} else {
7272
String mapJavaOpts = conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
73-
assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!",
74-
mapJavaOpts);
75-
assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " +
76-
mapJavaOpts,
77-
mapJavaOpts, MAP_OPTS_VAL);
73+
assertNotNull(
74+
mapJavaOpts, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!");
75+
assertEquals(
76+
mapJavaOpts, MAP_OPTS_VAL, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " +
77+
mapJavaOpts);
7878
}
7979

8080
Level logLevel =
8181
Level.toLevel(conf.get(JobConf.MAPRED_MAP_TASK_LOG_LEVEL,
8282
Level.INFO.toString()));
83-
assertEquals(JobConf.MAPRED_MAP_TASK_LOG_LEVEL + "has value of " +
84-
logLevel, logLevel, Level.OFF);
83+
assertEquals(logLevel, Level.OFF, JobConf.MAPRED_MAP_TASK_LOG_LEVEL + "has value of " +
84+
logLevel);
8585
}
8686
}
8787

@@ -95,25 +95,25 @@ protected void setup(Context context)
9595
boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
9696
if (oldConfigs) {
9797
String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
98-
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
99-
javaOpts);
100-
assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
101-
javaOpts,
102-
javaOpts, TASK_OPTS_VAL);
98+
assertNotNull(
99+
javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
100+
assertEquals(
101+
javaOpts, TASK_OPTS_VAL, JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
102+
javaOpts);
103103
} else {
104104
String reduceJavaOpts = conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
105-
assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!",
106-
reduceJavaOpts);
107-
assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " +
108-
reduceJavaOpts,
109-
reduceJavaOpts, REDUCE_OPTS_VAL);
105+
assertNotNull(
106+
reduceJavaOpts, JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!");
107+
assertEquals(
108+
reduceJavaOpts, REDUCE_OPTS_VAL, JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " +
109+
reduceJavaOpts);
110110
}
111111

112112
Level logLevel =
113113
Level.toLevel(conf.get(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL,
114114
Level.INFO.toString()));
115-
assertEquals(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL + "has value of " +
116-
logLevel, logLevel, Level.OFF);
115+
assertEquals(logLevel, Level.OFF, JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL + "has value of " +
116+
logLevel);
117117
}
118118
}
119119

@@ -135,21 +135,21 @@ private Job submitAndValidateJob(JobConf conf, int numMaps, int numReds,
135135
numMaps, numReds);
136136
job.setMapperClass(MyMapper.class);
137137
job.setReducerClass(MyReducer.class);
138-
assertFalse("Job already has a job tracker connection, before it's submitted",
139-
job.isConnected());
138+
assertFalse(
139+
job.isConnected(), "Job already has a job tracker connection, before it's submitted");
140140
job.submit();
141-
assertTrue("Job doesn't have a job tracker connection, even though it's been submitted",
142-
job.isConnected());
141+
assertTrue(
142+
job.isConnected(), "Job doesn't have a job tracker connection, even though it's been submitted");
143143
job.waitForCompletion(true);
144144
assertTrue(job.isSuccessful());
145145

146146
// Check output directory
147147
FileSystem fs = FileSystem.get(conf);
148-
assertTrue("Job output directory doesn't exit!", fs.exists(outDir));
148+
assertTrue(fs.exists(outDir), "Job output directory doesn't exit!");
149149
FileStatus[] list = fs.listStatus(outDir, new OutputFilter());
150150
int numPartFiles = numReds == 0 ? numMaps : numReds;
151-
assertTrue("Number of part-files is " + list.length + " and not "
152-
+ numPartFiles, list.length == numPartFiles);
151+
assertTrue(list.length == numPartFiles, "Number of part-files is " + list.length + " and not "
152+
+ numPartFiles);
153153
return job;
154154
}
155155

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestClientProtocolProviderImpls.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,10 +27,10 @@
2727
import org.apache.hadoop.mapred.YARNRunner;
2828
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
2929
import org.apache.hadoop.util.StringUtils;
30-
import org.junit.Test;
30+
import org.junit.jupiter.api.Test;
3131

32-
import static org.junit.Assert.assertTrue;
33-
import static org.junit.Assert.fail;
32+
import static org.junit.jupiter.api.Assertions.assertTrue;
33+
import static org.junit.jupiter.api.Assertions.fail;
3434

3535
public class TestClientProtocolProviderImpls {
3636

@@ -91,9 +91,9 @@ public void testClusterExceptionRootCause() throws Exception {
9191
fail("Cluster init should fail because of non-existing FileSystem");
9292
} catch (IOException ioEx) {
9393
final String stackTrace = StringUtils.stringifyException(ioEx);
94-
assertTrue("No root cause detected",
95-
stackTrace.contains(UnsupportedFileSystemException.class.getName())
96-
&& stackTrace.contains("nosuchfs"));
94+
assertTrue(
95+
stackTrace.contains(UnsupportedFileSystemException.class.getName())
96+
&& stackTrace.contains("nosuchfs"), "No root cause detected");
9797
}
9898
}
9999
}

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestCounters.java

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,9 @@
2323
import org.apache.hadoop.mapreduce.counters.Limits;
2424
import org.slf4j.Logger;
2525
import org.slf4j.LoggerFactory;
26-
import org.junit.Test;
26+
import org.junit.jupiter.api.Test;
2727

28-
import static org.junit.Assert.*;
28+
import static org.junit.jupiter.api.Assertions.*;
2929
/**
3030
* TestCounters checks the sanity and recoverability of {@code Counters}
3131
*/
@@ -46,19 +46,19 @@ public void testCounterValue() {
4646
long expectedValue = initValue;
4747
Counter counter = new Counters().findCounter("test", "foo");
4848
counter.setValue(initValue);
49-
assertEquals("Counter value is not initialized correctly",
50-
expectedValue, counter.getValue());
49+
assertEquals(
50+
expectedValue, counter.getValue(), "Counter value is not initialized correctly");
5151
for (int j = 0; j < NUMBER_INC; j++) {
5252
int incValue = rand.nextInt();
5353
counter.increment(incValue);
5454
expectedValue += incValue;
55-
assertEquals("Counter value is not incremented correctly",
56-
expectedValue, counter.getValue());
55+
assertEquals(
56+
expectedValue, counter.getValue(), "Counter value is not incremented correctly");
5757
}
5858
expectedValue = rand.nextInt();
5959
counter.setValue(expectedValue);
60-
assertEquals("Counter value is not set correctly",
61-
expectedValue, counter.getValue());
60+
assertEquals(
61+
expectedValue, counter.getValue(), "Counter value is not set correctly");
6262
}
6363
}
6464

@@ -148,6 +148,6 @@ private void shouldThrow(Class<? extends Exception> ecls, Runnable runnable) {
148148
LOG.info("got expected: "+ e);
149149
return;
150150
}
151-
assertTrue("Should've thrown "+ ecls.getSimpleName(), false);
151+
assertTrue(false, "Should've thrown "+ ecls.getSimpleName());
152152
}
153153
}

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLargeSort.java

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,25 @@
2323
import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
2424
import org.apache.hadoop.util.ToolRunner;
2525
import org.apache.hadoop.yarn.conf.YarnConfiguration;
26-
import org.junit.After;
27-
import org.junit.Before;
28-
import org.junit.Test;
26+
import org.junit.jupiter.api.AfterEach;
27+
import org.junit.jupiter.api.BeforeEach;
28+
import org.junit.jupiter.api.Test;
2929

3030
import java.io.IOException;
3131

32-
import static org.junit.Assert.assertEquals;
32+
import static org.junit.jupiter.api.Assertions.assertEquals;
3333

3434
public class TestLargeSort {
3535
MiniMRClientCluster cluster;
3636

37-
@Before
37+
@BeforeEach
3838
public void setup() throws IOException {
3939
Configuration conf = new YarnConfiguration();
4040
cluster = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
4141
cluster.start();
4242
}
4343

44-
@After
44+
@AfterEach
4545
public void cleanup() throws IOException {
4646
if (cluster != null) {
4747
cluster.stop();
@@ -59,8 +59,8 @@ public void testLargeSort() throws Exception {
5959
conf.setInt(MRJobConfig.IO_SORT_MB, ioSortMb);
6060
conf.setInt(LargeSorter.NUM_MAP_TASKS, 1);
6161
conf.setInt(LargeSorter.MBS_PER_MAP, ioSortMb);
62-
assertEquals("Large sort failed for " + ioSortMb, 0,
63-
ToolRunner.run(conf, new LargeSorter(), args));
62+
assertEquals(0
63+
, ToolRunner.run(conf, new LargeSorter(), args), "Large sort failed for " + ioSortMb);
6464
}
6565
}
6666
}

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestLocalRunner.java

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,8 @@
3030
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
3131
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
3232
import org.apache.hadoop.util.ReflectionUtils;
33-
import org.junit.Test;
33+
import org.junit.jupiter.api.Test;
34+
import org.junit.jupiter.api.Timeout;
3435
import org.slf4j.Logger;
3536
import org.slf4j.LoggerFactory;
3637

@@ -44,10 +45,10 @@
4445
import java.util.ArrayList;
4546
import java.util.List;
4647

47-
import static org.junit.Assert.assertEquals;
48-
import static org.junit.Assert.assertFalse;
49-
import static org.junit.Assert.assertNotNull;
50-
import static org.junit.Assert.assertTrue;
48+
import static org.junit.jupiter.api.Assertions.assertEquals;
49+
import static org.junit.jupiter.api.Assertions.assertFalse;
50+
import static org.junit.jupiter.api.Assertions.assertNotNull;
51+
import static org.junit.jupiter.api.Assertions.assertTrue;
5152

5253
/**
5354
* Stress tests for the LocalJobRunner
@@ -235,9 +236,9 @@ private void verifyOutput(Path outputPath) throws IOException {
235236

236237
// Should get a single line of the form "0\t(count)"
237238
String line = r.readLine().trim();
238-
assertTrue("Line does not have correct key", line.startsWith("0\t"));
239+
assertTrue(line.startsWith("0\t"), "Line does not have correct key");
239240
int count = Integer.valueOf(line.substring(2));
240-
assertEquals("Incorrect count generated!", TOTAL_RECORDS, count);
241+
assertEquals(TOTAL_RECORDS, count, "Incorrect count generated!");
241242

242243
r.close();
243244

@@ -276,23 +277,24 @@ public void testGcCounter() throws Exception {
276277
FileOutputFormat.setOutputPath(job, outputPath);
277278

278279
boolean ret = job.waitForCompletion(true);
279-
assertTrue("job failed", ret);
280+
assertTrue(ret, "job failed");
280281

281282
// This job should have done *some* gc work.
282283
// It had to clean up 400,000 objects.
283284
// We strongly suspect this will result in a few milliseconds effort.
284285
Counter gcCounter = job.getCounters().findCounter(
285286
TaskCounter.GC_TIME_MILLIS);
286287
assertNotNull(gcCounter);
287-
assertTrue("No time spent in gc", gcCounter.getValue() > 0);
288+
assertTrue(gcCounter.getValue() > 0, "No time spent in gc");
288289
}
289290

290291

291292
/**
292293
* Run a test with several mappers in parallel, operating at different
293294
* speeds. Verify that the correct amount of output is created.
294295
*/
295-
@Test(timeout=120*1000)
296+
@Test
297+
@Timeout(value=120)
296298
public void testMultiMaps() throws Exception {
297299
Job job = Job.getInstance();
298300

@@ -377,7 +379,7 @@ public void testInvalidMultiMapParallelism() throws Exception {
377379
FileOutputFormat.setOutputPath(job, outputPath);
378380

379381
boolean success = job.waitForCompletion(true);
380-
assertFalse("Job succeeded somehow", success);
382+
assertFalse(success, "Job succeeded somehow");
381383
}
382384

383385
/** An IF that creates no splits */
@@ -434,7 +436,7 @@ public void testEmptyMaps() throws Exception {
434436
FileOutputFormat.setOutputPath(job, outputPath);
435437

436438
boolean success = job.waitForCompletion(true);
437-
assertTrue("Empty job should work", success);
439+
assertTrue(success, "Empty job should work");
438440
}
439441

440442
/** @return the directory where numberfiles are written (mapper inputs) */
@@ -510,7 +512,7 @@ private void verifyNumberJob(int numMaps) throws Exception {
510512
int expectedPerMapper = maxVal * (maxVal + 1) / 2;
511513
int expectedSum = expectedPerMapper * numMaps;
512514
LOG.info("expected sum: " + expectedSum + ", got " + valueSum);
513-
assertEquals("Didn't get all our results back", expectedSum, valueSum);
515+
assertEquals(expectedSum, valueSum, "Didn't get all our results back");
514516
}
515517

516518
/**
@@ -551,7 +553,7 @@ private void doMultiReducerTest(int numMaps, int numReduces,
551553
LocalJobRunner.setLocalMaxRunningReduces(job, parallelReduces);
552554

553555
boolean result = job.waitForCompletion(true);
554-
assertTrue("Job failed!!", result);
556+
assertTrue(result, "Job failed!!");
555557

556558
verifyNumberJob(numMaps);
557559
}

0 commit comments

Comments (0)