Skip to content

Commit f5f0159

Browse files
author
fanshilun
committed
HADOOP-19415. Fix CheckStyle.
1 parent 08ff51a commit f5f0159

File tree

10 files changed

+48
-61
lines changed

10 files changed

+48
-61
lines changed

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -170,13 +170,11 @@ public void testStreamType() throws Exception {
170170

171171
localFs.setVerifyChecksum(true);
172172
in = localFs.open(testPath);
173-
assertTrue(
174-
in.getWrappedStream() instanceof FSInputChecker, "stream is input checker");
173+
assertTrue(in.getWrappedStream() instanceof FSInputChecker, "stream is input checker");
175174

176175
localFs.setVerifyChecksum(false);
177176
in = localFs.open(testPath);
178-
assertFalse(
179-
in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker");
177+
assertFalse(in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker");
180178
}
181179

182180
@Test

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -58,11 +58,11 @@ public void testConstructorWithQuota() {
5858
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
5959
assertEquals(length, contentSummary.getLength(), "getLength");
6060
assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
61-
assertEquals(directoryCount
62-
, contentSummary.getDirectoryCount(), "getDirectoryCount");
61+
assertEquals(directoryCount,
62+
contentSummary.getDirectoryCount(), "getDirectoryCount");
6363
assertEquals(quota, contentSummary.getQuota(), "getQuota");
64-
assertEquals(spaceConsumed
65-
, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
64+
assertEquals(spaceConsumed,
65+
contentSummary.getSpaceConsumed(), "getSpaceConsumed");
6666
assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota");
6767
}
6868

@@ -78,8 +78,8 @@ public void testConstructorNoQuota() {
7878
spaceConsumed(length).build();
7979
assertEquals(length, contentSummary.getLength(), "getLength");
8080
assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
81-
assertEquals(directoryCount
82-
, contentSummary.getDirectoryCount(), "getDirectoryCount");
81+
assertEquals(directoryCount,
82+
contentSummary.getDirectoryCount(), "getDirectoryCount");
8383
assertEquals(-1, contentSummary.getQuota(), "getQuota");
8484
assertEquals(length, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
8585
assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota");
@@ -131,11 +131,11 @@ public void testReadFields() throws IOException {
131131
contentSummary.readFields(in);
132132
assertEquals(length, contentSummary.getLength(), "getLength");
133133
assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
134-
assertEquals(directoryCount
135-
, contentSummary.getDirectoryCount(), "getDirectoryCount");
134+
assertEquals(directoryCount,
135+
contentSummary.getDirectoryCount(), "getDirectoryCount");
136136
assertEquals(quota, contentSummary.getQuota(), "getQuota");
137-
assertEquals(spaceConsumed
138-
, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
137+
assertEquals(spaceConsumed,
138+
contentSummary.getSpaceConsumed(), "getSpaceConsumed");
139139
assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota");
140140
}
141141

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,7 @@ public void testMount() throws Exception {
7070
XXDF df = new XXDF();
7171
String expectedMount =
7272
Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar";
73-
assertEquals(
74-
expectedMount, df.getMount(), "Invalid mount point");
73+
assertEquals(expectedMount, df.getMount(), "Invalid mount point");
7574
}
7675

7776
@Test
@@ -80,8 +79,7 @@ public void testFileSystem() throws Exception {
8079
XXDF df = new XXDF();
8180
String expectedFileSystem =
8281
Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
83-
assertEquals(
84-
expectedFileSystem, df.getFilesystem(), "Invalid filesystem");
82+
assertEquals(expectedFileSystem, df.getFilesystem(), "Invalid filesystem");
8583
}
8684

8785
@Test

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -91,28 +91,25 @@ public void testDU() throws IOException, InterruptedException {
9191
long duSize = du.getUsed();
9292
du.close();
9393

94-
assertTrue(
95-
duSize >= writtenSize &&
96-
writtenSize <= (duSize + slack), "Invalid on-disk size");
94+
assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
95+
"Invalid on-disk size");
9796

9897
//test with 0 interval, will not launch thread
9998
du = new DU(file, 0, 1, -1);
10099
du.init();
101100
duSize = du.getUsed();
102101
du.close();
103102

104-
assertTrue(
105-
duSize >= writtenSize &&
106-
writtenSize <= (duSize + slack), "Invalid on-disk size");
103+
assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
104+
"Invalid on-disk size");
107105

108106
//test without launching thread
109107
du = new DU(file, 10000, 0, -1);
110108
du.init();
111109
duSize = du.getUsed();
112110

113-
assertTrue(
114-
duSize >= writtenSize &&
115-
writtenSize <= (duSize + slack), "Invalid on-disk size");
111+
assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
112+
"Invalid on-disk size");
116113
}
117114

118115
@Test

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -87,8 +87,7 @@ public void testFileStatusWritable() throws Exception {
8787
int iterator = 0;
8888
for (FileStatus fs : tests) {
8989
dest.readFields(in);
90-
assertEquals(
91-
dest, fs, "Different FileStatuses in iteration " + iterator);
90+
assertEquals(dest, fs, "Different FileStatuses in iteration " + iterator);
9291
iterator++;
9392
}
9493
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -188,7 +188,7 @@ public void testFilterFileSystem() throws Exception {
188188
}
189189
}
190190
assertTrue(errors <= 0, (errors + " methods were not overridden correctly - see" +
191-
" log"));
191+
" log"));
192192
}
193193

194194
@Test
@@ -307,11 +307,8 @@ public void testFilterPathCapabilites() throws Exception {
307307
try (FilterFileSystem flfs = new FilterLocalFileSystem()) {
308308
flfs.initialize(URI.create("filter:/"), conf);
309309
Path src = new Path("/src");
310-
assertFalse(
311-
312-
flfs.hasPathCapability(src,
313-
CommonPathCapabilities.FS_MULTIPART_UPLOADER), "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for "
314-
+ flfs);
310+
assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER),
311+
"hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs);
315312
}
316313
}
317314

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
import java.util.List;
2222

2323
import org.junit.jupiter.api.Test;
24-
import static org.junit.jupiter.api.Assertions.*;
24+
import static org.junit.jupiter.api.Assertions.assertEquals;
2525

2626
public class TestGlobExpander {
2727

@@ -55,11 +55,11 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException {
5555
private void checkExpansion(String filePattern, String... expectedExpansions)
5656
throws IOException {
5757
List<String> actualExpansions = GlobExpander.expand(filePattern);
58-
assertEquals(expectedExpansions.length
59-
, actualExpansions.size(), "Different number of expansions");
58+
assertEquals(expectedExpansions.length,
59+
actualExpansions.size(), "Different number of expansions");
6060
for (int i = 0; i < expectedExpansions.length; i++) {
61-
assertEquals(expectedExpansions[i]
62-
, actualExpansions.get(i), "Expansion of " + filePattern);
61+
assertEquals(expectedExpansions[i],
62+
actualExpansions.get(i), "Expansion of " + filePattern);
6363
}
6464
}
6565
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -274,9 +274,8 @@ public void testMakeQualifiedPath() throws Exception {
274274
+ harPath.toUri().getPath().toString();
275275
Path path = new Path(harPathWithUserinfo);
276276
Path qualifiedPath = path.getFileSystem(conf).makeQualified(path);
277-
assertTrue(
278-
qualifiedPath.toString().equals(harPathWithUserinfo), String.format(
279-
"The qualified path (%s) did not match the expected path (%s).",
277+
assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo),
278+
String.format("The qualified path (%s) did not match the expected path (%s).",
280279
qualifiedPath.toString(), harPathWithUserinfo));
281280
}
282281

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -108,8 +108,8 @@ private static void rmBufferDirs() throws IOException {
108108

109109
private static void validateTempDirCreation(String dir) throws IOException {
110110
File result = createTempFile(SMALL_FILE_SIZE);
111-
assertTrue(
112-
result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), "Checking for " + dir + " in " + result + " - FAILED!");
111+
assertTrue(result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()),
112+
"Checking for " + dir + " in " + result + " - FAILED!");
113113
}
114114

115115
private static File createTempFile() throws IOException {

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

Lines changed: 15 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -276,8 +276,7 @@ public void testBasicDelete() throws IOException {
276276
assertTrue(fileSys.mkdirs(dir1));
277277
writeFile(fileSys, file1, 1);
278278
writeFile(fileSys, file2, 1);
279-
assertFalse(
280-
fileSys.delete(file3), "Returned true deleting non-existant path");
279+
assertFalse(fileSys.delete(file3), "Returned true deleting non-existant path");
281280
assertTrue(fileSys.delete(file1), "Did not delete file");
282281
assertTrue(fileSys.delete(dir1), "Did not delete non-empty dir");
283282
}
@@ -314,8 +313,8 @@ public void testListStatusWithColons() throws IOException {
314313
colonFile.mkdirs();
315314
FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR));
316315
assertEquals(1, stats.length, "Unexpected number of stats");
317-
assertEquals(colonFile.getAbsolutePath()
318-
, stats[0].getPath().toUri().getPath(), "Bad path from stat");
316+
assertEquals(colonFile.getAbsolutePath(),
317+
stats[0].getPath().toUri().getPath(), "Bad path from stat");
319318
}
320319

321320
@Test
@@ -329,8 +328,8 @@ public void testListStatusReturnConsistentPathOnWindows() throws IOException {
329328
file.mkdirs();
330329
FileStatus[] stats = fileSys.listStatus(new Path(dirNoDriveSpec));
331330
assertEquals(1, stats.length, "Unexpected number of stats");
332-
assertEquals(new Path(file.getPath()).toUri().getPath()
333-
, stats[0].getPath().toUri().getPath(), "Bad path from stat");
331+
assertEquals(new Path(file.getPath()).toUri().getPath(),
332+
stats[0].getPath().toUri().getPath(), "Bad path from stat");
334333
}
335334

336335
@Test
@@ -601,8 +600,8 @@ public void testStripFragmentFromPath() throws Exception {
601600
// Create test file with fragment
602601
FileSystemTestHelper.createFile(fs, pathWithFragment);
603602
Path resolved = fs.resolvePath(pathWithFragment);
604-
assertEquals(pathQualified
605-
, resolved, "resolvePath did not strip fragment from Path");
603+
assertEquals(pathQualified,
604+
resolved, "resolvePath did not strip fragment from Path");
606605
}
607606

608607
@Test
@@ -794,8 +793,8 @@ protected Statistics getFileStatistics() {
794793
.stream()
795794
.filter(s -> s.getScheme().equals("file"))
796795
.collect(Collectors.toList());
797-
assertEquals(
798-
1, fileStats.size(), "Number of statistics counters for file://");
796+
assertEquals(1, fileStats.size(),
797+
"Number of statistics counters for file://");
799798
// this should be used for local and rawLocal, as they share the
800799
// same schema (although their class is different)
801800
return fileStats.get(0);
@@ -827,8 +826,8 @@ private void assertWritesCRC(String operation, Path path,
827826
final long bytesOut0 = stats.getBytesWritten();
828827
try {
829828
callable.call();
830-
assertEquals(
831-
CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, "Bytes written in " + operation + "; stats=" + stats);
829+
assertEquals(CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0,
830+
"Bytes written in " + operation + "; stats=" + stats);
832831
} finally {
833832
if (delete) {
834833
// clean up
@@ -857,8 +856,8 @@ public void testCRCwithClassicAPIs() throws Throwable {
857856
final long bytesRead0 = stats.getBytesRead();
858857
fileSys.open(file).close();
859858
final long bytesRead1 = stats.getBytesRead();
860-
assertEquals(
861-
CRC_SIZE, bytesRead1 - bytesRead0, "Bytes read in open() call with stats " + stats);
859+
assertEquals(CRC_SIZE, bytesRead1 - bytesRead0,
860+
"Bytes read in open() call with stats " + stats);
862861
}
863862

864863
/**
@@ -969,8 +968,8 @@ public void testReadIncludesCRCwithBuilders() throws Throwable {
969968
// now read back the data, again with the builder API
970969
final long bytesRead0 = stats.getBytesRead();
971970
fileSys.openFile(file).build().get().close();
972-
assertEquals(
973-
CRC_SIZE, stats.getBytesRead() - bytesRead0, "Bytes read in openFile() call with stats " + stats);
971+
assertEquals(CRC_SIZE, stats.getBytesRead() - bytesRead0,
972+
"Bytes read in openFile() call with stats " + stats);
974973
// now write with overwrite = true
975974
assertWritesCRC("createFileNonRecursive()",
976975
file,

0 commit comments

Comments (0)