 */
package org.apache.hadoop.mapred;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;

import java.io.IOException;
import java.text.ParseException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;

-import org.junit.Assert;
-
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.Counters.CountersExceededException;
import org.apache.hadoop.mapred.Counters.Group;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
import org.apache.hadoop.mapreduce.counters.CounterGroupFactory.FrameworkGroupFactory;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -85,14 +86,14 @@ private Counters getEnumCounters(String[] gNames, String[] cNames) {
   */
  private void testCounter(Counters counter) throws ParseException {
    String compactEscapedString = counter.makeEscapedCompactString();
-    assertFalse("compactEscapedString should not contain null",
-        compactEscapedString.contains("null"));
+    assertFalse(compactEscapedString.contains("null"),
+        "compactEscapedString should not contain null");

    Counters recoveredCounter =
      Counters.fromEscapedCompactString(compactEscapedString);
    // Check for recovery from string
-    assertEquals("Recovered counter does not match on content",
-        counter, recoveredCounter);
+    assertEquals(counter, recoveredCounter,
+        "Recovered counter does not match on content");
  }

  @Test
@@ -134,19 +135,19 @@ public void testCounterValue() {
      long expectedValue = initValue;
      Counter counter = counters.findCounter("foo", "bar");
      counter.setValue(initValue);
-      assertEquals("Counter value is not initialized correctly",
-          expectedValue, counter.getValue());
+      assertEquals(expectedValue, counter.getValue(),
+          "Counter value is not initialized correctly");
      for (int j = 0; j < NUMBER_INC; j++) {
        int incValue = rand.nextInt();
        counter.increment(incValue);
        expectedValue += incValue;
-        assertEquals("Counter value is not incremented correctly",
-            expectedValue, counter.getValue());
+        assertEquals(expectedValue, counter.getValue(),
+            "Counter value is not incremented correctly");
      }
      expectedValue = rand.nextInt();
      counter.setValue(expectedValue);
-      assertEquals("Counter value is not set correctly",
-          expectedValue, counter.getValue());
+      assertEquals(expectedValue, counter.getValue(),
+          "Counter value is not set correctly");
    }
  }
@@ -174,29 +175,28 @@ public void testWriteWithLegacyNames() {

  @SuppressWarnings("deprecation")
  private void checkLegacyNames(Counters counters) {
-    assertEquals("New name", 1, counters.findCounter(
-        TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue());
-    assertEquals("Legacy name", 1, counters.findCounter(
+    assertEquals(1, counters.findCounter(
+        TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue(), "New name");
+    assertEquals(1, counters.findCounter(
        "org.apache.hadoop.mapred.Task$Counter",
-        "MAP_INPUT_RECORDS").getValue());
-    assertEquals("Legacy enum", 1,
-        counters.findCounter(Task.Counter.MAP_INPUT_RECORDS).getValue());
+        "MAP_INPUT_RECORDS").getValue(), "Legacy name");
+    assertEquals(1, counters.findCounter(Task.Counter.MAP_INPUT_RECORDS).getValue(), "Legacy enum");

-    assertEquals("New name", 1, counters.findCounter(
-        JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue());
-    assertEquals("Legacy name", 1, counters.findCounter(
+    assertEquals(1, counters.findCounter(
+        JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue(), "New name");
+    assertEquals(1, counters.findCounter(
        "org.apache.hadoop.mapred.JobInProgress$Counter",
-        "DATA_LOCAL_MAPS").getValue());
-    assertEquals("Legacy enum", 1,
-        counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue());
+        "DATA_LOCAL_MAPS").getValue(), "Legacy name");
+    assertEquals(1,
+        counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue(), "Legacy enum");

-    assertEquals("New name", 1, counters.findCounter(
-        FileSystemCounter.class.getName(), "FILE_BYTES_READ").getValue());
-    assertEquals("New name and method", 1, counters.findCounter("file",
-        FileSystemCounter.BYTES_READ).getValue());
-    assertEquals("Legacy name", 1, counters.findCounter(
+    assertEquals(1, counters.findCounter(
+        FileSystemCounter.class.getName(), "FILE_BYTES_READ").getValue(), "New name");
+    assertEquals(1, counters.findCounter("file",
+        FileSystemCounter.BYTES_READ).getValue(), "New name and method");
+    assertEquals(1, counters.findCounter(
        "FileSystemCounters",
-        "FILE_BYTES_READ").getValue());
+        "FILE_BYTES_READ").getValue(), "Legacy name");
  }

  @SuppressWarnings("deprecation")
@@ -266,8 +266,8 @@ public void testMakeCompactString() {
    assertEquals("group1.counter1:1", counters.makeCompactString());
    counters.incrCounter("group2", "counter2", 3);
    String cs = counters.makeCompactString();
-    assertTrue("Bad compact string",
-        cs.equals(GC1 + ',' + GC2) || cs.equals(GC2 + ',' + GC1));
+    assertTrue(cs.equals(GC1 + ',' + GC2) || cs.equals(GC2 + ',' + GC1),
+        "Bad compact string");
  }

  @Test
@@ -321,7 +321,7 @@ private void shouldThrow(Class<? extends Exception> ecls, Runnable runnable) {
    } catch (CountersExceededException e) {
      return;
    }
-    Assert.fail("Should've thrown " + ecls.getSimpleName());
+    fail("Should've thrown " + ecls.getSimpleName());
  }

  public static void main(String[] args) throws IOException {
@@ -341,12 +341,12 @@ public void testFrameworkCounter() {

    org.apache.hadoop.mapreduce.Counter count1 =
        counterGroup.findCounter(JobCounter.NUM_FAILED_MAPS.toString());
-    Assert.assertNotNull(count1);
+    assertNotNull(count1);

    // Verify no exception get thrown when finding an unknown counter
    org.apache.hadoop.mapreduce.Counter count2 =
        counterGroup.findCounter("Unknown");
-    Assert.assertNull(count2);
+    assertNull(count2);
  }

  @SuppressWarnings("rawtypes")
@@ -363,19 +363,19 @@ public void testTaskCounter() {
    org.apache.hadoop.mapreduce.Counter count1 =
        counterGroup.findCounter(
            TaskCounter.PHYSICAL_MEMORY_BYTES.toString());
-    Assert.assertNotNull(count1);
+    assertNotNull(count1);
    count1.increment(10);
    count1.increment(10);
-    Assert.assertEquals(20, count1.getValue());
+    assertEquals(20, count1.getValue());

    // Verify no exception get thrown when finding an unknown counter
    org.apache.hadoop.mapreduce.Counter count2 =
        counterGroup.findCounter(
            TaskCounter.MAP_PHYSICAL_MEMORY_BYTES_MAX.toString());
-    Assert.assertNotNull(count2);
+    assertNotNull(count2);
    count2.increment(5);
    count2.increment(10);
-    Assert.assertEquals(10, count2.getValue());
+    assertEquals(10, count2.getValue());
  }

  @Test
@@ -385,12 +385,12 @@ public void testFilesystemCounter() {

    org.apache.hadoop.mapreduce.Counter count1 =
        fsGroup.findCounter("ANY_BYTES_READ");
-    Assert.assertNotNull(count1);
+    assertNotNull(count1);

    // Verify no exception get thrown when finding an unknown counter
    org.apache.hadoop.mapreduce.Counter count2 =
        fsGroup.findCounter("Unknown");
-    Assert.assertNull(count2);
+    assertNull(count2);
  }
}
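Note on the pattern applied throughout this patch: JUnit 5's org.junit.jupiter.api.Assertions takes the optional failure message as the last argument, whereas JUnit 4's org.junit.Assert took it first; fail() keeps its single-message form, which is why Assert.fail is replaced by the statically imported fail. A minimal sketch of the before/after signature follows; the example class and values are hypothetical and not part of the Hadoop source tree.

// Minimal illustration of the JUnit 4 -> JUnit 5 assertion argument order;
// hypothetical example, not part of this patch.
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class AssertionMessageOrderExample {

  @Test
  void messageArgumentIsLastInJUnit5() {
    long expectedValue = 42L;
    long actualValue = 42L;
    // JUnit 4 (org.junit.Assert):              assertEquals(message, expected, actual)
    // JUnit 5 (org.junit.jupiter.api.Assertions): assertEquals(expected, actual, message)
    assertEquals(expectedValue, actualValue, "Counter value is not set correctly");
  }
}

The same reordering applies to assertTrue and assertFalse (condition first, message last), which accounts for the swapped arguments in testCounter and testMakeCompactString above.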