
Commit c16bcc2

fanshilun committed
MAPREDUCE-7420. Fix CheckStyle.
1 parent 00f548e

16 files changed: +123 −144 lines changed
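Note: judging from the hunks below, these are mechanical Checkstyle cleanups with no behavior change: long assertion lines are wrapped, over-indented assertThrows lambda bodies are re-indented, missing whitespace after commas and parentheses is fixed, an unused import is dropped, and test parameters that shadowed a field are renamed with a "p" prefix. A minimal sketch of that last (HiddenField) pattern, with invented names that do not appear in the commit:

// Hypothetical illustration of a HiddenField fix; names are invented here.
class HiddenFieldSketch {
  private int numThreads;

  // Before: init(int numThreads) -- the parameter hides the field above.
  // After: renaming the parameter clears the Checkstyle warning.
  void init(int pNumThreads) {
    this.numThreads = pNumThreads; // assign the parameter to the field explicitly
  }
}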

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractFSContractTestBase.java

Lines changed: 0 additions & 14 deletions
@@ -250,20 +250,6 @@ protected Path methodPath() throws IOException {
     return path(methodName.getMethodName());
   }
 
-  /**
-   * Get a path whose name ends with the name of this method.
-   *
-   * This is a temporary requirement for upgrading from JUnit 4 to JUnit 5,
-   * and can be deleted later.
-   *
-   * @param name name of this method.
-   * @return a path implicitly unique amongst all methods in this class
-   * @throws IOException IO problems
-   */
-  protected Path methodPath(String name) throws IOException {
-    return path(name);
-  }
-
   /**
    * Take a simple path like "/something" and turn it into
    * a qualified path against the test FS.

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java

Lines changed: 2 additions & 1 deletion
@@ -187,7 +187,8 @@ private void checkLegacyNames(Counters counters) {
     assertEquals(1, counters.findCounter(
         "org.apache.hadoop.mapred.JobInProgress$Counter",
         "DATA_LOCAL_MAPS").getValue(), "Legacy name");
-    assertEquals(1, counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue(), "Legacy enum");
+    assertEquals(1,
+        counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue(), "Legacy enum");
 
     assertEquals(1, counters.findCounter(
         FileSystemCounter.class.getName(), "FILE_BYTES_READ").getValue(), "New name");

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java

Lines changed: 17 additions & 17 deletions
@@ -59,9 +59,9 @@ public class TestFileInputFormat {
 
   private int numThreads;
 
-  public void initTestFileInputFormat(int numThreads) {
-    this.numThreads = numThreads;
-    LOG.info("Running with numThreads: " + numThreads);
+  public void initTestFileInputFormat(int pNumThreads) {
+    this.numThreads = pNumThreads;
+    LOG.info("Running with numThreads: " + pNumThreads);
   }
 
   public static Collection<Object[]> data() {
@@ -84,8 +84,8 @@ public void cleanup() throws IOException {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testListLocatedStatus(int numThreads) throws Exception {
-    initTestFileInputFormat(numThreads);
+  public void testListLocatedStatus(int pNumThreads) throws Exception {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = getConfiguration();
     conf.setBoolean("fs.test.impl.disable.cache", false);
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
@@ -106,8 +106,8 @@ public void testListLocatedStatus(int numThreads) throws Exception {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testIgnoreDirs(int numThreads) throws Exception {
-    initTestFileInputFormat(numThreads);
+  public void testIgnoreDirs(int pNumThreads) throws Exception {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = getConfiguration();
     conf.setBoolean(FileInputFormat.INPUT_DIR_NONRECURSIVE_IGNORE_SUBDIRS, true);
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
@@ -123,8 +123,8 @@ public void testIgnoreDirs(int numThreads) throws Exception {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testSplitLocationInfo(int numThreads) throws Exception {
-    initTestFileInputFormat(numThreads);
+  public void testSplitLocationInfo(int pNumThreads) throws Exception {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = getConfiguration();
     conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
         "test:///a1/a2");
@@ -148,8 +148,8 @@ public void testSplitLocationInfo(int numThreads) throws Exception {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testListStatusSimple(int numThreads) throws IOException {
-    initTestFileInputFormat(numThreads);
+  public void testListStatusSimple(int pNumThreads) throws IOException {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = new Configuration();
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
 
@@ -167,8 +167,8 @@ public void testListStatusSimple(int numThreads) throws IOException {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testListStatusNestedRecursive(int numThreads) throws IOException {
-    initTestFileInputFormat(numThreads);
+  public void testListStatusNestedRecursive(int pNumThreads) throws IOException {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = new Configuration();
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
 
@@ -186,8 +186,8 @@ public void testListStatusNestedRecursive(int numThreads) throws IOException {
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testListStatusNestedNonRecursive(int numThreads) throws IOException {
-    initTestFileInputFormat(numThreads);
+  public void testListStatusNestedNonRecursive(int pNumThreads) throws IOException {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = new Configuration();
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
 
@@ -205,8 +205,8 @@ public void testListStatusNestedNonRecursive(int numThreads) throws IOException
 
   @MethodSource("data")
   @ParameterizedTest
-  public void testListStatusErrorOnNonExistantDir(int numThreads) throws IOException {
-    initTestFileInputFormat(numThreads);
+  public void testListStatusErrorOnNonExistantDir(int pNumThreads) throws IOException {
+    initTestFileInputFormat(pNumThreads);
     Configuration conf = new Configuration();
     conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java

Lines changed: 5 additions & 5 deletions
@@ -148,12 +148,12 @@ private void testRecoveryInternal(int commitVersion, int recoveryVersion)
       assertTrue(jtd2.exists(), "Version 1 recovers to " + jtd2);
       validateContent(jobTempDir2);
     } else {
-        assertFalse(jtd2.exists(), "Version 2 commits to output dir " + jtd2);
-        if (commitVersion == 1) {
-          assertEquals(0, jtd1.list().length,
-              "Version 2 recovery moves to output dir from " + jtd1);
-        }
+      assertFalse(jtd2.exists(), "Version 2 commits to output dir " + jtd2);
+      if (commitVersion == 1) {
+        assertEquals(0, jtd1.list().length,
+            "Version 2 recovery moves to output dir from " + jtd1);
       }
+    }
 
     committer2.commitJob(jContext2);
     validateContent(outDir);

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java

Lines changed: 1 addition & 1 deletion
@@ -191,7 +191,7 @@ public void testSafeguardSplittingUnSplittableFiles() throws IOException {
     // The LineRecordReader must fail when trying to read a file that
     // was compressed using an unsplittable file format
     assertThrows(IOException.class, () -> {
-        testSplitRecords("TestSafeguardSplittingUnsplittableFiles.txt.gz", 2);
+      testSplitRecords("TestSafeguardSplittingUnsplittableFiles.txt.gz", 2);
     });
   }
 
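The assertion wraps the failing call in JUnit 5's assertThrows, which returns the caught exception so it can be inspected. A minimal sketch of the API, independent of this test:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;

import org.junit.jupiter.api.Test;

public class AssertThrowsSketchTest {
  @Test
  public void reportsTheThrownException() {
    // The lambda is expected to throw; assertThrows fails the test if it doesn't.
    IOException e = assertThrows(IOException.class, () -> {
      throw new IOException("unsplittable input");
    });
    assertEquals("unsplittable input", e.getMessage());
  }
}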

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java

Lines changed: 18 additions & 18 deletions
@@ -101,25 +101,25 @@ public void testShufflePermissions() throws Exception {
   public void testSpillFilesCountLimitInvalidValue() throws Exception {
     String message = "Invalid value for \"mapreduce.task.spill.files.count.limit\", " +
         "current value: -2";
-    assertThrows(IOException.class,() -> {
-        JobConf conf = new JobConf();
-        conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-        conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
-        conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
-        MapOutputFile mof = new MROutputFiles();
-        mof.setConf(conf);
-        TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
-        MapTask mockTask = mock(MapTask.class);
-        doReturn(mof).when(mockTask).getMapOutputFile();
-        doReturn(attemptId).when(mockTask).getTaskID();
-        doReturn(new Progress()).when(mockTask).getSortPhase();
-        TaskReporter mockReporter = mock(TaskReporter.class);
-        doReturn(new Counter()).when(mockReporter).getCounter(any(TaskCounter.class));
-        MapOutputCollector.Context ctx = new MapOutputCollector.Context(mockTask, conf, mockReporter);
-        MapOutputBuffer<Object, Object> mob = new MapOutputBuffer<>();
+    assertThrows(IOException.class, () -> {
+      JobConf conf = new JobConf();
+      conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
+      conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
+      conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
+      MapOutputFile mof = new MROutputFiles();
+      mof.setConf(conf);
+      TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
+      MapTask mockTask = mock(MapTask.class);
+      doReturn(mof).when(mockTask).getMapOutputFile();
+      doReturn(attemptId).when(mockTask).getTaskID();
+      doReturn(new Progress()).when(mockTask).getSortPhase();
+      TaskReporter mockReporter = mock(TaskReporter.class);
+      doReturn(new Counter()).when(mockReporter).getCounter(any(TaskCounter.class));
+      MapOutputCollector.Context ctx = new MapOutputCollector.Context(mockTask, conf, mockReporter);
+      MapOutputBuffer<Object, Object> mob = new MapOutputBuffer<>();
 
-        mob.init(ctx);
-        mob.close();
+      mob.init(ctx);
+      mob.close();
     }, message);
   }
 
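The stubbing above uses Mockito's doReturn(...).when(mock).method() form rather than when(...).thenReturn(...). A minimal sketch of that style, with a hypothetical interface that is not from the commit:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

public class DoReturnSketch {
  interface Task {
    String name();
  }

  public static void main(String[] args) {
    Task task = mock(Task.class);
    // doReturn(...).when(...) stubs without invoking the real method first,
    // which is the safer form for spies and side-effecting methods.
    doReturn("map-task").when(task).name();
    System.out.println(task.name()); // prints "map-task"
  }
}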

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/lib/TestCombineFileRecordReader.java

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ public void testInitNextRecordReader() throws IOException{
       CombineFileRecordReader cfrr = new CombineFileRecordReader(conf, combineFileSplit,
           reporter, TextRecordReaderWrapper.class);
       verify(reporter).progress();
-      assertFalse(cfrr.next(key,value));
+      assertFalse(cfrr.next(key, value));
       verify(reporter, times(3)).progress();
     } finally {
       FileUtil.fullyDelete(new File(outDir.toString()));

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobSubmissionFiles.java

Lines changed: 14 additions & 14 deletions
@@ -85,20 +85,20 @@ public void testGetStagingDirWhenFullFileOwnerNameAndFullUserName()
   public void testGetStagingWhenFileOwnerNameAndCurrentUserNameDoesNotMatch()
       throws IOException, InterruptedException {
     assertThrows(IOException.class, () -> {
-        Cluster cluster = mock(Cluster.class);
-        Configuration conf = new Configuration();
-        String stagingDirOwner = "someuser";
-        Path stagingPath = mock(Path.class);
-        UserGroupInformation user = UserGroupInformation.createUserForTesting(USER_1, GROUP_NAMES);
-        assertEquals(USER_1, user.getUserName());
-        FileSystem fs = new FileSystemTestHelper.MockFileSystem();
-        FileStatus fileStatus = new FileStatus(1, true, 1, 1, 100L, 100L,
-            FsPermission.getDefault(), stagingDirOwner, stagingDirOwner, stagingPath);
-        when(stagingPath.getFileSystem(conf)).thenReturn(fs);
-        when(fs.getFileStatus(stagingPath)).thenReturn(fileStatus);
-        when(cluster.getStagingAreaDir()).thenReturn(stagingPath);
-        assertEquals(stagingPath,
-            JobSubmissionFiles.getStagingDir(cluster, conf, user));
+      Cluster cluster = mock(Cluster.class);
+      Configuration conf = new Configuration();
+      String stagingDirOwner = "someuser";
+      Path stagingPath = mock(Path.class);
+      UserGroupInformation user = UserGroupInformation.createUserForTesting(USER_1, GROUP_NAMES);
+      assertEquals(USER_1, user.getUserName());
+      FileSystem fs = new FileSystemTestHelper.MockFileSystem();
+      FileStatus fileStatus = new FileStatus(1, true, 1, 1, 100L, 100L,
+          FsPermission.getDefault(), stagingDirOwner, stagingDirOwner, stagingPath);
+      when(stagingPath.getFileSystem(conf)).thenReturn(fs);
+      when(fs.getFileStatus(stagingPath)).thenReturn(fileStatus);
+      when(cluster.getStagingAreaDir()).thenReturn(stagingPath);
+      assertEquals(stagingPath,
+          JobSubmissionFiles.getStagingDir(cluster, conf, user));
     });
   }
 

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestTaskID.java

Lines changed: 4 additions & 4 deletions
@@ -26,7 +26,6 @@
 import org.junit.jupiter.api.Test;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertSame;
 import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -258,7 +257,7 @@ public void testHashCode() {
       TaskID taskId2 = new TaskID(jobId, types[i], i);
 
       assertEquals(taskId1.hashCode(), taskId2.hashCode(),
-        "The hashcode() method gave unequal hash codes for two equal task IDs");
+          "The hashcode() method gave unequal hash codes for two equal task IDs");
     }
   }
 
@@ -331,7 +330,8 @@ public void testForName() {
         "The forName() method did not parse the task ID string correctly");
     assertEquals("task_6789_0004_c_000003", TaskID.forName("task_6789_0004_c_000003").toString(),
         "The forName() method did not parse the task ID string correctly");
-    assertEquals("task_12345_0005_t_4000000", TaskID.forName("task_12345_0005_t_4000000").toString(),
+    assertEquals("task_12345_0005_t_4000000",
+        TaskID.forName("task_12345_0005_t_4000000").toString(),
         "The forName() method did not parse the task ID string correctly");
 
     try {
@@ -456,7 +456,7 @@ public void testGetTaskTypeChar() {
    */
   @Test
   public void testGetAllTaskTypes() {
-    assertEquals( "(m|r|s|c|t)", TaskID.getAllTaskTypes(),
+    assertEquals("(m|r|s|c|t)", TaskID.getAllTaskTypes(),
         "The getAllTaskTypes method did not return the expected string");
   }
 }

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/filecache/TestClientDistributedCacheManager.java

Lines changed: 4 additions & 4 deletions
@@ -153,8 +153,8 @@ public void testDetermineCacheVisibilities() throws IOException {
     // We use get() instead of getBoolean() so we can tell the difference
     // between wrong and missing
     assertEquals("true,true", jobConf.get(MRJobConfig.CACHE_FILE_VISIBILITIES),
-         "The file paths were not found to be publicly visible " +
-             "even though the full path is publicly accessible");
+        "The file paths were not found to be publicly visible " +
+            "even though the full path is publicly accessible");
     checkCacheEntries(statCache, null, firstCacheFile, relativePath);
 
     job = Job.getInstance(conf);
@@ -167,8 +167,8 @@ public void testDetermineCacheVisibilities() throws IOException {
     // We use get() instead of getBoolean() so we can tell the difference
     // between wrong and missing
     assertEquals("true", jobConf.get(MRJobConfig.CACHE_FILE_VISIBILITIES),
-         "The file path was not found to be publicly visible " +
-             "even though the full path is publicly accessible");
+        "The file path was not found to be publicly visible " +
+            "even though the full path is publicly accessible");
     checkCacheEntries(statCache, null, wildcardPath.getParent());
 
     Path qualifiedParent = fs.makeQualified(TEST_VISIBILITY_PARENT_DIR);

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/filecache/TestDistributedCache.java

Lines changed: 12 additions & 12 deletions
@@ -47,9 +47,9 @@ public void testAddFileToClassPath() throws Exception {
 
     DistributedCache.addFileToClassPath(new Path("file:///a"), conf);
     assertEquals("file:/a", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals("file:///a", conf.get(MRJobConfig.CACHE_FILES),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
 
     DistributedCache.addFileToClassPath(new Path("file:///b"), conf);
     assertEquals("file:/a,file:/b", conf.get(MRJobConfig.CLASSPATH_FILES),
@@ -70,9 +70,9 @@ public void testAddFileToClassPath() throws Exception {
 
     DistributedCache.addFileToClassPath(new Path("file:///a"), conf, fs);
     assertEquals("file:/a", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals("file:///a", conf.get(MRJobConfig.CACHE_FILES),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
 
     DistributedCache.addFileToClassPath(new Path("file:///b"), conf, fs);
     assertEquals("file:/a,file:/b", conf.get(MRJobConfig.CLASSPATH_FILES),
@@ -93,15 +93,15 @@ public void testAddFileToClassPath() throws Exception {
 
     DistributedCache.addFileToClassPath(new Path("file:///a"), conf, fs, true);
     assertEquals("file:/a", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals( "file:///a", conf.get(MRJobConfig.CACHE_FILES),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
 
     DistributedCache.addFileToClassPath(new Path("file:///b"), conf, fs, true);
     assertEquals("file:/a,file:/b", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals("file:///a,file:///b", conf.get(MRJobConfig.CACHE_FILES),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
 
     // And finally with 4th arg false
     conf.clear();
@@ -115,14 +115,14 @@ public void testAddFileToClassPath() throws Exception {
 
     DistributedCache.addFileToClassPath(new Path("file:///a"), conf, fs, false);
     assertEquals("file:/a", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals("", conf.get(MRJobConfig.CACHE_FILES, ""),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
 
     DistributedCache.addFileToClassPath(new Path("file:///b"), conf, fs, false);
     assertEquals("file:/a,file:/b", conf.get(MRJobConfig.CLASSPATH_FILES),
-            "The mapreduce.job.classpath.files property was not set correctly");
+        "The mapreduce.job.classpath.files property was not set correctly");
     assertEquals("", conf.get(MRJobConfig.CACHE_FILES, ""),
-            "The mapreduce.job.cache.files property was not set correctly");
+        "The mapreduce.job.cache.files property was not set correctly");
   }
 }
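All of the hunks in this file adjust continuation indentation on wrapped assertEquals arguments; as the new lines read, a wrapped argument sits four spaces past the statement's own indent. That reading is inferred from these hunks, not from the Checkstyle configuration itself. A runnable sketch with invented names:

public class ContinuationIndentSketch {
  private static String describe(String key, String value, String note) {
    return key + "=" + value + " (" + note + ")";
  }

  public static void main(String[] args) {
    // The wrapped arguments below are indented +4 from the statement.
    String s = describe("mapreduce.job.classpath.files", "file:/a",
        "continuation indent of four spaces");
    System.out.println(s);
  }
}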
