diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 7a7e0e853e162..1d9fc166c745a 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -381,6 +381,26 @@
       <artifactId>lz4-java</artifactId>
       <scope>provided</scope>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
index da429ffe960a4..69b67958c2532 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
@@ -27,7 +27,9 @@
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestAfsCheckPath {
@@ -56,11 +58,13 @@ public void testCheckPathWithTheSameNonDefaultPort()
afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT));
}
- @Test(expected=InvalidPathException.class)
+ @Test
public void testCheckPathWithDifferentPorts() throws URISyntaxException {
- URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT);
- AbstractFileSystem afs = new DummyFileSystem(uri);
- afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT));
+ assertThrows(InvalidPathException.class, () -> {
+ URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT);
+ AbstractFileSystem afs = new DummyFileSystem(uri);
+ afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT));
+ });
}
private static class DummyFileSystem extends AbstractFileSystem {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
index f182fe5da7c36..647144206f122 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
@@ -24,8 +24,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestAvroFSInput {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java
index 72e850b1313d5..e0c812cc8fa4a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java
@@ -17,11 +17,12 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
public class TestBlockLocation {
@@ -70,7 +71,8 @@ private static void checkBlockLocation(final BlockLocation loc,
/**
* Call all the constructors and verify the delegation is working properly
*/
- @Test(timeout = 5000)
+ @Test
+ @Timeout(value = 5)
public void testBlockLocationConstructors() throws Exception {
//
BlockLocation loc;
@@ -91,7 +93,8 @@ public void testBlockLocationConstructors() throws Exception {
/**
* Call each of the setters and verify
*/
- @Test(timeout = 5000)
+ @Test
+ @Timeout(value = 5)
public void testBlockLocationSetters() throws Exception {
BlockLocation loc;
loc = new BlockLocation();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
index 8b42aa6779dad..03494e728937b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
@@ -26,8 +26,14 @@
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.*;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
public class TestChecksumFileSystem {
static final String TEST_ROOT_DIR =
@@ -35,7 +41,7 @@ public class TestChecksumFileSystem {
static LocalFileSystem localFs;
- @Before
+ @BeforeEach
public void resetLocalFs() throws Exception {
localFs = FileSystem.getLocal(new Configuration());
localFs.setVerifyChecksum(true);
@@ -77,12 +83,12 @@ public void testVerifyChecksum() throws Exception {
readFile(localFs, testPath, 1025);
localFs.delete(localFs.getChecksumFile(testPath), true);
- assertTrue("checksum deleted", !localFs.exists(localFs.getChecksumFile(testPath)));
+ assertTrue(!localFs.exists(localFs.getChecksumFile(testPath)), "checksum deleted");
//copying the wrong checksum file
FileUtil.copy(localFs, localFs.getChecksumFile(testPath11), localFs,
localFs.getChecksumFile(testPath),false,true,localFs.getConf());
- assertTrue("checksum exists", localFs.exists(localFs.getChecksumFile(testPath)));
+ assertTrue(localFs.exists(localFs.getChecksumFile(testPath)), "checksum exists");
boolean errorRead = false;
try {
@@ -90,12 +96,12 @@ public void testVerifyChecksum() throws Exception {
}catch(ChecksumException ie) {
errorRead = true;
}
- assertTrue("error reading", errorRead);
+ assertTrue(errorRead, "error reading");
//now setting verify false, the read should succeed
localFs.setVerifyChecksum(false);
String str = readFile(localFs, testPath, 1024).toString();
- assertTrue("read", "testing".equals(str));
+ assertTrue("testing".equals(str), "read");
}
@Test
@@ -153,7 +159,7 @@ public void testTruncatedChecksum() throws Exception {
// telling it not to verify checksums, should avoid issue.
localFs.setVerifyChecksum(false);
String str = readFile(localFs, testPath, 1024).toString();
- assertTrue("read", "testing truncation".equals(str));
+ assertTrue("testing truncation".equals(str), "read");
}
@Test
@@ -164,13 +170,11 @@ public void testStreamType() throws Exception {
localFs.setVerifyChecksum(true);
in = localFs.open(testPath);
- assertTrue("stream is input checker",
- in.getWrappedStream() instanceof FSInputChecker);
+ assertTrue(in.getWrappedStream() instanceof FSInputChecker, "stream is input checker");
localFs.setVerifyChecksum(false);
in = localFs.open(testPath);
- assertFalse("stream is not input checker",
- in.getWrappedStream() instanceof FSInputChecker);
+ assertFalse(in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker");
}
@Test
@@ -200,7 +204,7 @@ public void testCorruptedChecksum() throws Exception {
} catch (ChecksumException ce) {
e = ce;
} finally {
- assertNotNull("got checksum error", e);
+ assertNotNull(e, "got checksum error");
}
localFs.setVerifyChecksum(false);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
index 084c6a0aef83d..aeac1fbad7556 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
@@ -17,8 +17,7 @@
*/
package org.apache.hadoop.fs;
-
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
@@ -30,8 +29,8 @@
import org.apache.hadoop.fs.shell.CommandFormat.NotEnoughArgumentsException;
import org.apache.hadoop.fs.shell.CommandFormat.TooManyArgumentsException;
import org.apache.hadoop.fs.shell.CommandFormat.UnknownOptionException;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
/**
* This class tests the command line parsing
@@ -41,7 +40,7 @@ public class TestCommandFormat {
private static List expectedArgs;
private static Set expectedOpts;
- @Before
+ @BeforeEach
public void setUp() {
args = new ArrayList<>();
expectedOpts = new HashSet<>();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java
index 98f9f2021f8b4..c911d79e0146a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java
@@ -17,14 +17,16 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.mockito.InOrder;
public class TestContentSummary {
@@ -33,12 +35,12 @@ public class TestContentSummary {
@Test
public void testConstructorEmpty() {
ContentSummary contentSummary = new ContentSummary.Builder().build();
- assertEquals("getLength", 0, contentSummary.getLength());
- assertEquals("getFileCount", 0, contentSummary.getFileCount());
- assertEquals("getDirectoryCount", 0, contentSummary.getDirectoryCount());
- assertEquals("getQuota", -1, contentSummary.getQuota());
- assertEquals("getSpaceConsumed", 0, contentSummary.getSpaceConsumed());
- assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota());
+ assertEquals(0, contentSummary.getLength(), "getLength");
+ assertEquals(0, contentSummary.getFileCount(), "getFileCount");
+ assertEquals(0, contentSummary.getDirectoryCount(), "getDirectoryCount");
+ assertEquals(-1, contentSummary.getQuota(), "getQuota");
+ assertEquals(0, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
+ assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota");
}
// check the full constructor with quota information
@@ -54,14 +56,13 @@ public void testConstructorWithQuota() {
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).quota(quota).
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
- assertEquals("getLength", length, contentSummary.getLength());
- assertEquals("getFileCount", fileCount, contentSummary.getFileCount());
- assertEquals("getDirectoryCount", directoryCount,
- contentSummary.getDirectoryCount());
- assertEquals("getQuota", quota, contentSummary.getQuota());
- assertEquals("getSpaceConsumed", spaceConsumed,
- contentSummary.getSpaceConsumed());
- assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota());
+ assertEquals(length, contentSummary.getLength(), "getLength");
+ assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
+ assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount");
+ assertEquals(quota, contentSummary.getQuota(), "getQuota");
+ assertEquals(spaceConsumed,
+ contentSummary.getSpaceConsumed(), "getSpaceConsumed");
+ assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota");
}
// check the constructor with quota information
@@ -74,13 +75,13 @@ public void testConstructorNoQuota() {
ContentSummary contentSummary = new ContentSummary.Builder().length(length).
fileCount(fileCount).directoryCount(directoryCount).
spaceConsumed(length).build();
- assertEquals("getLength", length, contentSummary.getLength());
- assertEquals("getFileCount", fileCount, contentSummary.getFileCount());
- assertEquals("getDirectoryCount", directoryCount,
- contentSummary.getDirectoryCount());
- assertEquals("getQuota", -1, contentSummary.getQuota());
- assertEquals("getSpaceConsumed", length, contentSummary.getSpaceConsumed());
- assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota());
+ assertEquals(length, contentSummary.getLength(), "getLength");
+ assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
+ assertEquals(directoryCount,
+ contentSummary.getDirectoryCount(), "getDirectoryCount");
+ assertEquals(-1, contentSummary.getQuota(), "getQuota");
+ assertEquals(length, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
+ assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota");
}
// check the write method
@@ -127,14 +128,12 @@ public void testReadFields() throws IOException {
.thenReturn(spaceQuota);
contentSummary.readFields(in);
- assertEquals("getLength", length, contentSummary.getLength());
- assertEquals("getFileCount", fileCount, contentSummary.getFileCount());
- assertEquals("getDirectoryCount", directoryCount,
- contentSummary.getDirectoryCount());
- assertEquals("getQuota", quota, contentSummary.getQuota());
- assertEquals("getSpaceConsumed", spaceConsumed,
- contentSummary.getSpaceConsumed());
- assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota());
+ assertEquals(length, contentSummary.getLength(), "getLength");
+ assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount");
+ assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount");
+ assertEquals(quota, contentSummary.getQuota(), "getQuota");
+ assertEquals(spaceConsumed, contentSummary.getSpaceConsumed(), "getSpaceConsumed");
+ assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota");
}
// check the header with quotas
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java
index 6b9a34c3b32eb..782a4e6411e50 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java
@@ -19,16 +19,16 @@
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test to make sure df can run and work.
@@ -37,13 +37,13 @@ public class TestDFCachingGetSpaceUsed {
final static private File DF_DIR = GenericTestUtils.getTestDir("testdfspace");
public static final int FILE_SIZE = 1024;
- @Before
+ @BeforeEach
public void setUp() {
FileUtil.fullyDelete(DF_DIR);
assertTrue(DF_DIR.mkdirs());
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
FileUtil.fullyDelete(DF_DIR);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
index 3476f3eef4329..ec6c2d13ca332 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
@@ -29,24 +29,23 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
public class TestDFVariations {
private static final String TEST_ROOT_DIR =
GenericTestUtils.getTestDir("testdfvariations").getAbsolutePath();
private static File test_root = null;
- @Before
+ @BeforeEach
public void setup() throws IOException {
test_root = new File(TEST_ROOT_DIR);
test_root.mkdirs();
}
- @After
+ @AfterEach
public void after() throws IOException {
FileUtil.setWritable(test_root, true);
FileUtil.fullyDelete(test_root);
@@ -65,25 +64,26 @@ protected String[] getExecString() {
}
}
- @Test(timeout=5000)
+ @Test
+ @Timeout(value = 5)
public void testMount() throws Exception {
XXDF df = new XXDF();
String expectedMount =
Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar";
- assertEquals("Invalid mount point",
- expectedMount, df.getMount());
+ assertEquals(expectedMount, df.getMount(), "Invalid mount point");
}
- @Test(timeout=5000)
+ @Test
+ @Timeout(value = 5)
public void testFileSystem() throws Exception {
XXDF df = new XXDF();
String expectedFileSystem =
Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
- assertEquals("Invalid filesystem",
- expectedFileSystem, df.getFilesystem());
+ assertEquals(expectedFileSystem, df.getFilesystem(), "Invalid filesystem");
}
- @Test(timeout=5000)
+ @Test
+ @Timeout(value = 5)
public void testDFInvalidPath() throws Exception {
// Generate a path that doesn't exist
Random random = new Random(0xDEADBEEFl);
@@ -106,7 +106,8 @@ public void testDFInvalidPath() throws Exception {
}
}
- @Test(timeout=5000)
+ @Test
+ @Timeout(value = 5)
public void testDFMalformedOutput() throws Exception {
DF df = new DF(new File("/"), 0l);
BufferedReader reader = new BufferedReader(new StringReader(
@@ -152,19 +153,19 @@ public void testDFMalformedOutput() throws Exception {
}
}
- @Test(timeout=5000)
+ @Test
+ @Timeout(value = 5)
public void testGetMountCurrentDirectory() throws Exception {
File currentDirectory = new File(".");
String workingDir = currentDirectory.getAbsoluteFile().getCanonicalPath();
DF df = new DF(new File(workingDir), 0L);
String mountPath = df.getMount();
File mountDir = new File(mountPath);
- assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should exist.",
- mountDir.exists());
- assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.",
- mountDir.isDirectory());
- assertTrue("Working dir ["+workingDir+"] should start with ["+mountPath+"].",
- workingDir.startsWith(mountPath));
+ assertTrue(mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist.");
+ assertTrue(mountDir.isDirectory(),
+ "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.");
+ assertTrue(workingDir.startsWith(mountPath),
+ "Working dir ["+workingDir+"] should start with ["+mountPath+"].");
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
index f340cc202ed01..654867972183a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.fs;
import org.apache.hadoop.util.Shell;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.junit.Assume.assumeFalse;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
import java.io.File;
import java.io.IOException;
@@ -37,14 +37,14 @@
public class TestDU {
final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp");
- @Before
+ @BeforeEach
public void setUp() {
assumeFalse(Shell.WINDOWS);
FileUtil.fullyDelete(DU_DIR);
assertTrue(DU_DIR.mkdirs());
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
FileUtil.fullyDelete(DU_DIR);
}
@@ -91,9 +91,8 @@ public void testDU() throws IOException, InterruptedException {
long duSize = du.getUsed();
du.close();
- assertTrue("Invalid on-disk size",
- duSize >= writtenSize &&
- writtenSize <= (duSize + slack));
+ assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
+ "Invalid on-disk size");
//test with 0 interval, will not launch thread
du = new DU(file, 0, 1, -1);
@@ -101,18 +100,16 @@ public void testDU() throws IOException, InterruptedException {
duSize = du.getUsed();
du.close();
- assertTrue("Invalid on-disk size",
- duSize >= writtenSize &&
- writtenSize <= (duSize + slack));
+ assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
+ "Invalid on-disk size");
//test without launching thread
du = new DU(file, 10000, 0, -1);
du.init();
duSize = du.getUsed();
- assertTrue("Invalid on-disk size",
- duSize >= writtenSize &&
- writtenSize <= (duSize + slack));
+ assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack),
+ "Invalid on-disk size");
}
@Test
@@ -124,7 +121,7 @@ public void testDUGetUsedWillNotReturnNegative() throws IOException {
DU du = new DU(file, 10000L, 0, -1);
du.incDfsUsed(-Long.MAX_VALUE);
long duSize = du.getUsed();
- assertTrue(String.valueOf(duSize), duSize >= 0L);
+ assertTrue(duSize >= 0L, String.valueOf(duSize));
}
@Test
@@ -133,14 +130,14 @@ public void testDUSetInitialValue() throws IOException {
createFile(file, 8192);
DU du = new DU(file, 3000, 0, 1024);
du.init();
- assertTrue("Initial usage setting not honored", du.getUsed() == 1024);
+ assertTrue(du.getUsed() == 1024, "Initial usage setting not honored");
// wait until the first du runs.
try {
Thread.sleep(5000);
} catch (InterruptedException ie) {}
- assertTrue("Usage didn't get updated", du.getUsed() == 8192);
+ assertTrue(du.getUsed() == 8192, "Usage didn't get updated");
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java
index 9572bed4098f4..1a500ae9b65cb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import static org.apache.hadoop.test.LambdaTestUtils.*;
/**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java
index 5de32861db68d..28e937f53bd15 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java
@@ -21,8 +21,9 @@
import org.apache.commons.net.ftp.FTP;
import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestDelegateToFileSystem {
@@ -37,7 +38,7 @@ private void testDefaultUriInternal(String defaultUri)
FileSystem.setDefaultUri(conf, defaultUri);
final AbstractFileSystem ftpFs =
AbstractFileSystem.get(FTP_URI_NO_PORT, conf);
- Assert.assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri());
+ assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri());
}
@Test
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java
index 6030c12c16c4d..51638985bc7c6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
/**
* The default port of DelegateToFileSystem is set from child file system.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java
index 582bc3142c872..8d11297d7cfd7 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java
@@ -18,8 +18,17 @@
package org.apache.hadoop.fs;
import java.io.IOException;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.atMost;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DelegationTokenRenewer.Renewable;
@@ -27,8 +36,9 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Time;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -42,7 +52,7 @@ public abstract class RenewableFileSystem extends FileSystem
Configuration conf;
FileSystem fs;
- @Before
+ @BeforeEach
public void setup() {
DelegationTokenRenewer.renewCycle = RENEW_CYCLE;
DelegationTokenRenewer.reset();
@@ -69,8 +79,8 @@ public Long answer(InvocationOnMock invocation) {
renewer.addRenewAction(fs);
- assertEquals("FileSystem not added to DelegationTokenRenewer", 1,
- renewer.getRenewQueueLength());
+ assertEquals(1, renewer.getRenewQueueLength(),
+ "FileSystem not added to DelegationTokenRenewer");
Thread.sleep(RENEW_CYCLE*2);
verify(token, atLeast(2)).renew(eq(conf));
@@ -82,8 +92,8 @@ public Long answer(InvocationOnMock invocation) {
verify(fs, never()).getDelegationToken(null);
verify(fs, never()).setDelegationToken(any());
- assertEquals("FileSystem not removed from DelegationTokenRenewer", 0,
- renewer.getRenewQueueLength());
+ assertEquals(0, renewer.getRenewQueueLength(),
+ "FileSystem not removed from DelegationTokenRenewer");
}
@Test
@@ -179,7 +189,8 @@ public Long answer(InvocationOnMock invocation) {
assertEquals(0, renewer.getRenewQueueLength());
}
- @Test(timeout=4000)
+ @Test
+ @Timeout(value = 4)
public void testMultipleTokensDoNotDeadlock() throws IOException,
InterruptedException {
Configuration conf = mock(Configuration.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
index 60b24c776c14e..5d792713bfd70 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java
@@ -17,14 +17,14 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,32 +51,32 @@ public void testConfBasedAndAPIBasedSetUMask() throws Exception {
String defaultlUMask =
conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
- assertEquals("Default UMask changed!", "022", defaultlUMask);
+ assertEquals("022", defaultlUMask, "Default UMask changed!");
URI uri1 = new URI("file://mydfs:50070/");
URI uri2 = new URI("file://tmp");
FileContext fc1 = FileContext.getFileContext(uri1, conf);
FileContext fc2 = FileContext.getFileContext(uri2, conf);
- assertEquals("Umask for fc1 is incorrect", 022, fc1.getUMask().toShort());
- assertEquals("Umask for fc2 is incorrect", 022, fc2.getUMask().toShort());
+ assertEquals(022, fc1.getUMask().toShort(), "Umask for fc1 is incorrect");
+ assertEquals(022, fc2.getUMask().toShort(), "Umask for fc2 is incorrect");
// Till a user explicitly calls FileContext.setUMask(), the updates through
// configuration should be reflected..
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "011");
- assertEquals("Umask for fc1 is incorrect", 011, fc1.getUMask().toShort());
- assertEquals("Umask for fc2 is incorrect", 011, fc2.getUMask().toShort());
+ assertEquals(011, fc1.getUMask().toShort(), "Umask for fc1 is incorrect");
+ assertEquals(011, fc2.getUMask().toShort(), "Umask for fc2 is incorrect");
// Stop reflecting the conf update for specific FileContexts, once an
// explicit setUMask is done.
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "066");
fc1.setUMask(FsPermission.createImmutable((short) 00033));
- assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort());
- assertEquals("Umask for fc2 is incorrect", 066, fc2.getUMask().toShort());
+ assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect");
+ assertEquals(066, fc2.getUMask().toShort(), "Umask for fc2 is incorrect");
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
fc2.setUMask(FsPermission.createImmutable((short) 00044));
- assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort());
- assertEquals("Umask for fc2 is incorrect", 044, fc2.getUMask().toShort());
+ assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect");
+ assertEquals(044, fc2.getUMask().toShort(), "Umask for fc2 is incorrect");
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
index 40db1fdda2130..df742f7223d52 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
@@ -20,16 +20,18 @@
import java.io.IOException;
import java.util.Set;
-import org.junit.Assert;
import org.apache.hadoop.util.ShutdownHookManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import static org.apache.hadoop.fs.FileContextTestHelper.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
- * Tests {@link FileContext.#deleteOnExit(Path)} functionality.
+ * Tests {@link FileContext#deleteOnExit(Path)} functionality.
*/
public class TestFileContextDeleteOnExit {
private static int blockSize = 1024;
@@ -38,23 +40,23 @@ public class TestFileContextDeleteOnExit {
private final FileContextTestHelper helper = new FileContextTestHelper();
private FileContext fc;
- @Before
+ @BeforeEach
public void setup() throws IOException {
fc = FileContext.getLocalFSFileContext();
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
fc.delete(helper.getTestRootPath(fc), true);
}
private void checkDeleteOnExitData(int size, FileContext fc, Path... paths) {
- Assert.assertEquals(size, FileContext.DELETE_ON_EXIT.size());
+ assertEquals(size, FileContext.DELETE_ON_EXIT.size());
Set set = FileContext.DELETE_ON_EXIT.get(fc);
- Assert.assertEquals(paths.length, (set == null ? 0 : set.size()));
+ assertEquals(paths.length, (set == null ? 0 : set.size()));
for (Path path : paths) {
- Assert.assertTrue(set.contains(path));
+ assertTrue(set.contains(path));
}
}
@@ -67,7 +69,7 @@ public void testDeleteOnExit() throws Exception {
checkDeleteOnExitData(1, fc, file1);
// Ensure shutdown hook is added
- Assert.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER));
+ assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER));
Path file2 = helper.getTestRootPath(fc, "dir1/file2");
createFile(fc, file2, numBlocks, blockSize);
@@ -83,8 +85,8 @@ public void testDeleteOnExit() throws Exception {
// paths are cleaned up
FileContext.FINALIZER.run();
checkDeleteOnExitData(0, fc, new Path[0]);
- Assert.assertFalse(exists(fc, file1));
- Assert.assertFalse(exists(fc, file2));
- Assert.assertFalse(exists(fc, dir));
+ assertFalse(exists(fc, file1));
+ assertFalse(exists(fc, file2));
+ assertFalse(exists(fc, dir));
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
index 2919de20bffd9..7dd9590d944e7 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
@@ -24,9 +24,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* Tests resolution of AbstractFileSystems for a given path with symlinks.
@@ -42,12 +44,13 @@ public class TestFileContextResolveAfs {
private FileContext fc;
private FileSystem localFs;
- @Before
+ @BeforeEach
public void setup() throws IOException {
fc = FileContext.getFileContext();
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testFileContextResolveAfs() throws IOException {
Configuration conf = new Configuration();
localFs = FileSystem.get(conf);
@@ -60,7 +63,7 @@ public void testFileContextResolveAfs() throws IOException {
fc.createSymlink(localPath, linkPath, true);
Set afsList = fc.resolveAbstractFileSystems(linkPath);
- Assert.assertEquals(1, afsList.size());
+ assertEquals(1, afsList.size());
localFs.delete(linkPath, true);
localFs.delete(localPath, true);
localFs.close();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
index 61a688ea4ee8b..3266bb657c4d0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
@@ -17,7 +17,9 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -32,7 +34,7 @@
import java.util.Collections;
import java.util.List;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -85,8 +87,7 @@ public void testFileStatusWritable() throws Exception {
int iterator = 0;
for (FileStatus fs : tests) {
dest.readFields(in);
- assertEquals("Different FileStatuses in iteration " + iterator,
- dest, fs);
+ assertEquals(dest, fs, "Different FileStatuses in iteration " + iterator);
iterator++;
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
index 2b8be39193a03..d792a49554b3c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.fs;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.io.IOException;
import java.net.URI;
@@ -29,8 +29,8 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.NetUtilsTestResolver;
import org.apache.hadoop.util.Progressable;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
public class TestFileSystemCanonicalization {
static String[] authorities = {
@@ -44,7 +44,7 @@ public class TestFileSystemCanonicalization {
};
- @BeforeClass
+ @BeforeAll
public static void initialize() throws Exception {
NetUtilsTestResolver.install();
}
@@ -288,7 +288,7 @@ void verifyCheckPath(FileSystem fs, String path, boolean shouldPass) {
}
assertEquals(pathAuthority, fqPath.toUri().getAuthority());
} else {
- assertNotNull("did not fail", e);
+ assertNotNull(e, "did not fail");
assertEquals("Wrong FS: "+rawPath+", expected: "+fs.getUri(),
e.getMessage());
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
index 10ad8a14487ef..c65ba2d7dfa9d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java
@@ -27,11 +27,12 @@
import java.net.URL;
import java.util.ServiceConfigurationError;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.fail;
/**
* Tests related to filesystem creation and lifecycle.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java
index 5710049afb104..e68a0857723c7 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java
@@ -21,24 +21,23 @@
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
-import java.util.concurrent.TimeUnit;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
/**
* This tests basic operations of {@link FileSystemStorageStatistics} class.
*/
+@Timeout(10)
public class TestFileSystemStorageStatistics {
private static final Logger LOG = LoggerFactory.getLogger(
TestFileSystemStorageStatistics.class);
@@ -62,10 +61,7 @@ public class TestFileSystemStorageStatistics {
private FileSystemStorageStatistics storageStatistics =
new FileSystemStorageStatistics(FS_STORAGE_STATISTICS_NAME, statistics);
- @Rule
- public final Timeout globalTimeout = new Timeout(10, TimeUnit.SECONDS);
-
- @Before
+ @BeforeEach
public void setup() {
statistics.incrementBytesRead(RandomUtils.nextInt(0, 100));
statistics.incrementBytesWritten(RandomUtils.nextInt(0, 100));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
index 0372537cb3475..90edf7d4ff5b8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java
@@ -18,8 +18,15 @@
package org.apache.hadoop.fs;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
import java.io.IOException;
@@ -28,7 +35,7 @@
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
index 1b42290cedc5e..6ce01fe7176e1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
@@ -18,8 +18,16 @@
package org.apache.hadoop.fs;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.reset;
import java.io.IOException;
import java.lang.reflect.Method;
@@ -36,8 +44,8 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.DelegationTokenIssuer;
import org.apache.hadoop.util.Progressable;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
public class TestFilterFileSystem {
@@ -45,7 +53,7 @@ public class TestFilterFileSystem {
private static final Logger LOG = FileSystem.LOG;
private static final Configuration conf = new Configuration();
- @BeforeClass
+ @BeforeAll
public static void setup() {
conf.set("fs.flfs.impl", FilterLocalFileSystem.class.getName());
conf.setBoolean("fs.flfs.impl.disable.cache", true);
@@ -179,8 +187,8 @@ public void testFilterFileSystem() throws Exception {
}
}
}
- assertTrue((errors + " methods were not overridden correctly - see" +
- " log"), errors <= 0);
+ assertTrue(errors <= 0, (errors + " methods were not overridden correctly - see" +
+ " log"));
}
@Test
@@ -299,11 +307,8 @@ public void testFilterPathCapabilites() throws Exception {
try (FilterFileSystem flfs = new FilterLocalFileSystem()) {
flfs.initialize(URI.create("filter:/"), conf);
Path src = new Path("/src");
- assertFalse(
- "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for "
- + flfs,
- flfs.hasPathCapability(src,
- CommonPathCapabilities.FS_MULTIPART_UPLOADER));
+ assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER),
+ "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs);
}
}
@@ -325,7 +330,7 @@ private void checkFsConf(FileSystem fs, Configuration conf, int expectDepth) {
int depth = 0;
while (true) {
depth++;
- assertFalse("depth "+depth+">"+expectDepth, depth > expectDepth);
+ assertFalse(depth > expectDepth, "depth "+depth+">"+expectDepth);
assertEquals(conf, fs.getConf());
if (!(fs instanceof FilterFileSystem)) {
break;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
index 396924810d98e..77794490744c3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.viewfs.ConfigUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
public class TestFilterFs {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java
index 574ed704da277..f3c822a985d29 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.fs;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.util.DataChecksum;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class TestFsOptions {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java
index 67906d526bc8a..a8020a66183a2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java
@@ -22,9 +22,12 @@
import org.apache.hadoop.fs.shell.CommandFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ToolRunner;
-import org.assertj.core.api.Assertions;
-import org.junit.Test;
-import org.mockito.Mockito;
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class TestFsShell {
@@ -65,11 +68,11 @@ public void testDFSWithInvalidCommmand() throws Throwable {
try (GenericTestUtils.SystemErrCapturer capture =
new GenericTestUtils.SystemErrCapturer()) {
ToolRunner.run(shell, new String[]{"dfs -mkdirs"});
- Assertions.assertThat(capture.getOutput())
+ assertThat(capture.getOutput())
.as("FSShell dfs command did not print the error " +
"message when invalid command is passed")
.contains("-mkdirs: Unknown command");
- Assertions.assertThat(capture.getOutput())
+ assertThat(capture.getOutput())
.as("FSShell dfs command did not print help " +
"message when invalid command is passed")
.contains("Usage: hadoop fs [generic options]");
@@ -79,22 +82,22 @@ public void testDFSWithInvalidCommmand() throws Throwable {
@Test
public void testExceptionNullMessage() throws Exception {
final String cmdName = "-cmdExNullMsg";
- final Command cmd = Mockito.mock(Command.class);
- Mockito.when(cmd.run(Mockito.any())).thenThrow(
+ final Command cmd = mock(Command.class);
+ when(cmd.run(any())).thenThrow(
new IllegalArgumentException());
- Mockito.when(cmd.getUsage()).thenReturn(cmdName);
+ when(cmd.getUsage()).thenReturn(cmdName);
- final CommandFactory cmdFactory = Mockito.mock(CommandFactory.class);
+ final CommandFactory cmdFactory = mock(CommandFactory.class);
final String[] names = {cmdName};
- Mockito.when(cmdFactory.getNames()).thenReturn(names);
- Mockito.when(cmdFactory.getInstance(cmdName)).thenReturn(cmd);
+ when(cmdFactory.getNames()).thenReturn(names);
+ when(cmdFactory.getInstance(cmdName)).thenReturn(cmd);
FsShell shell = new FsShell(new Configuration());
shell.commandFactory = cmdFactory;
try (GenericTestUtils.SystemErrCapturer capture =
new GenericTestUtils.SystemErrCapturer()) {
ToolRunner.run(shell, new String[]{cmdName});
- Assertions.assertThat(capture.getOutput())
+ assertThat(capture.getOutput())
.contains(cmdName + ": Null exception message");
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
index 7556bc75fb27a..319ae0e2d8a5b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
@@ -20,10 +20,10 @@
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
@@ -34,9 +34,9 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,7 +48,7 @@ public class TestFsShellCopy {
static LocalFileSystem lfs;
static Path testRootDir, srcPath, dstPath;
- @BeforeClass
+ @BeforeAll
public static void setup() throws Exception {
conf = new Configuration();
shell = new FsShell(conf);
@@ -62,7 +62,7 @@ public static void setup() throws Exception {
dstPath = new Path(testRootDir, "dstFile");
}
- @Before
+ @BeforeEach
public void prepFiles() throws Exception {
lfs.setVerifyChecksum(true);
lfs.setWriteChecksum(true);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java
index 05ad5c23e6542..c2a3a1c1efdc9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.fs;
import org.apache.hadoop.conf.Configuration;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertThrows;
/**
* Test FsShell -ls command.
@@ -34,7 +35,7 @@ public class TestFsShellList {
private static LocalFileSystem lfs;
private static Path testRootDir;
- @BeforeClass
+ @BeforeAll
public static void setup() throws Exception {
conf = new Configuration();
shell = new FsShell(conf);
@@ -47,7 +48,7 @@ public static void setup() throws Exception {
assertThat(lfs.mkdirs(testRootDir)).isTrue();
}
- @AfterClass
+ @AfterAll
public static void teardown() throws Exception {
lfs.delete(testRootDir, true);
}
@@ -78,14 +79,15 @@ public void testList() throws Exception {
/*
UGI params should take effect when we pass.
*/
- @Test(expected = IllegalArgumentException.class)
+ @Test
public void testListWithUGI() throws Exception {
- FsShell fsShell = new FsShell(new Configuration());
- //Passing Dummy such that it should through IAE
- fsShell.getConf()
- .set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
- "DUMMYAUTH");
- String[] lsArgv = new String[] {"-ls", testRootDir.toString()};
- fsShell.run(lsArgv);
+ assertThrows(IllegalArgumentException.class, () -> {
+ FsShell fsShell = new FsShell(new Configuration());
+      //Passing a dummy value so that it should throw an IllegalArgumentException
+ fsShell.getConf().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "DUMMYAUTH");
+ String[] lsArgv = new String[]{"-ls", testRootDir.toString()};
+ fsShell.run(lsArgv);
+ });
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
index 77b2f445a48de..34a6d254940ce 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
@@ -19,9 +19,9 @@
package org.apache.hadoop.fs;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -42,8 +42,9 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -59,7 +60,7 @@ public class TestFsShellReturnCode {
private static FileSystem fileSys;
private static FsShell fsShell;
- @BeforeClass
+ @BeforeAll
public static void setup() throws IOException {
conf.setClass("fs.file.impl", LocalFileSystemExtn.class, LocalFileSystem.class);
fileSys = FileSystem.get(conf);
@@ -105,14 +106,10 @@ private void change(int exit, String owner, String group, String...files)
FileStatus[] stats = fileSys.globStatus(new Path(files[i]));
if (stats != null) {
for (int j=0; j < stats.length; j++) {
- assertEquals("check owner of " + files[i],
- ((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()),
- stats[j].getOwner()
- );
- assertEquals("check group of " + files[i],
- ((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()),
- stats[j].getGroup()
- );
+ assertEquals(((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()),
+ stats[j].getOwner(), "check owner of " + files[i]);
+ assertEquals(((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()),
+ stats[j].getGroup(), "check group of " + files[i]);
}
}
}
@@ -127,7 +124,8 @@ private void change(int exit, String owner, String group, String...files)
*
* @throws Exception
*/
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testChmod() throws Exception {
Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists");
@@ -183,7 +181,8 @@ public void testChmod() throws Exception {
*
* @throws Exception
*/
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testChown() throws Exception {
Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists");
@@ -239,7 +238,8 @@ public void testChown() throws Exception {
*
* @throws Exception
*/
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testChgrp() throws Exception {
Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists");
@@ -284,7 +284,8 @@ public void testChgrp() throws Exception {
change(1, null, "admin", f2, f7);
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
throws Exception {
Configuration conf = new Configuration();
@@ -303,20 +304,22 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
args[0] = "-get";
args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString();
args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString();
- assertTrue("file exists", !fileSys.exists(new Path(args[1])));
- assertTrue("file exists", !fileSys.exists(new Path(args[2])));
+ assertTrue(!fileSys.exists(new Path(args[1])), "file exists");
+ assertTrue(!fileSys.exists(new Path(args[2])), "file exists");
int run = shell.run(args);
results = bytes.toString();
- assertEquals("Return code should be 1", 1, run);
- assertTrue(" Null is coming when source path is invalid. ",!results.contains("get: null"));
- assertTrue(" Not displaying the intended message ",results.contains("get: `"+args[1]+"': No such file or directory"));
+ assertEquals(1, run, "Return code should be 1");
+ assertTrue(!results.contains("get: null"), " Null is coming when source path is invalid. ");
+ assertTrue(results.contains("get: `" + args[1] + "': No such file or directory"),
+ " Not displaying the intended message ");
} finally {
IOUtils.closeStream(out);
System.setErr(oldErr);
}
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testRmWithNonexistentGlob() throws Exception {
Configuration conf = new Configuration();
FsShell shell = new FsShell();
@@ -337,7 +340,8 @@ public void testRmWithNonexistentGlob() throws Exception {
}
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testRmForceWithNonexistentGlob() throws Exception {
Configuration conf = new Configuration();
FsShell shell = new FsShell();
@@ -356,7 +360,8 @@ public void testRmForceWithNonexistentGlob() throws Exception {
}
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testInvalidDefaultFS() throws Exception {
// if default fs doesn't exist or is invalid, but the path provided in
// arguments is valid - fsshell should work
@@ -379,7 +384,7 @@ public void testInvalidDefaultFS() throws Exception {
int run = shell.run(args);
results = bytes.toString();
LOG.info("result=" + results);
- assertTrue("Return code should be 0", run == 0);
+ assertTrue(run == 0, "Return code should be 0");
} finally {
IOUtils.closeStream(out);
System.setErr(oldErr);
@@ -387,7 +392,8 @@ public void testInvalidDefaultFS() throws Exception {
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testInterrupt() throws Exception {
MyFsShell shell = new MyFsShell();
shell.setConf(new Configuration());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
index c2bd5b2133d47..e76ed27bb9e58 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java
@@ -24,9 +24,9 @@
import org.apache.hadoop.fs.shell.TouchCommands.Touch;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -39,7 +39,7 @@ public class TestFsShellTouch {
static LocalFileSystem lfs;
static Path testRootDir;
- @BeforeClass
+ @BeforeAll
public static void setup() throws Exception {
Configuration conf = new Configuration();
shell = new FsShell(conf);
@@ -51,7 +51,7 @@ public static void setup() throws Exception {
lfs.setWorkingDirectory(testRootDir);
}
- @Before
+ @BeforeEach
public void prepFiles() throws Exception {
lfs.setVerifyChecksum(true);
lfs.setWriteChecksum(true);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java
index d15c1ac515856..37499d3b1cb10 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java
@@ -14,15 +14,16 @@
package org.apache.hadoop.fs;
import org.apache.hadoop.conf.Configuration;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import java.io.*;
import java.net.URL;
import java.nio.file.Paths;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
/**
* Test case for FsUrlConnection with relativePath and SPACE.
*/
@@ -43,7 +44,7 @@ public class TestFsUrlConnectionPath {
private static final Configuration CONFIGURATION = new Configuration();
- @BeforeClass
+ @BeforeAll
public static void initialize() throws IOException{
write(ABSOLUTE_PATH.substring(5), DATA);
write(RELATIVE_PATH.substring(5), DATA);
@@ -52,7 +53,7 @@ public static void initialize() throws IOException{
URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
}
- @AfterClass
+ @AfterAll
public static void cleanup(){
delete(ABSOLUTE_PATH.substring(5));
delete(RELATIVE_PATH.substring(5));
@@ -83,25 +84,25 @@ public static int readStream(String path) throws Exception{
@Test
public void testAbsolutePath() throws Exception{
int length = readStream(ABSOLUTE_PATH);
- Assert.assertTrue(length > 1);
+ assertTrue(length > 1);
}
@Test
public void testRelativePath() throws Exception{
int length = readStream(RELATIVE_PATH);
- Assert.assertTrue(length > 1);
+ assertTrue(length > 1);
}
@Test
public void testAbsolutePathWithSpace() throws Exception{
int length = readStream(ABSOLUTE_PATH_W_ENCODED_SPACE);
- Assert.assertTrue(length > 1);
+ assertTrue(length > 1);
}
@Test
public void testRelativePathWithSpace() throws Exception{
int length = readStream(RELATIVE_PATH_W_ENCODED_SPACE);
- Assert.assertTrue(length > 1);
+ assertTrue(length > 1);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
index f43480e78df35..932ace76d2595 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
@@ -22,10 +22,11 @@
import java.util.Comparator;
import java.util.Random;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
@@ -42,7 +43,7 @@ public class TestGetFileBlockLocations {
private FileSystem fs;
private Random random;
- @Before
+ @BeforeEach
public void setUp() throws IOException {
conf = new Configuration();
Path rootPath = new Path(TEST_ROOT_DIR);
@@ -92,7 +93,7 @@ public int compare(BlockLocation arg0, BlockLocation arg1) {
}
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
fs.delete(path, true);
fs.close();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java
index d696dbfe40f57..7ef34281982a9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java
@@ -19,26 +19,29 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestGetSpaceUsed {
final static private File DIR =
GenericTestUtils.getTestDir("TestGetSpaceUsed");
- @Before
+ @BeforeEach
public void setUp() {
FileUtil.fullyDelete(DIR);
assertTrue(DIR.mkdirs());
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
FileUtil.fullyDelete(DIR);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
index 9d75ba0160ba7..b18047b771e04 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
@@ -20,8 +20,8 @@
import java.io.IOException;
import java.util.List;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestGlobExpander {
@@ -55,11 +55,11 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException {
private void checkExpansion(String filePattern, String... expectedExpansions)
throws IOException {
List actualExpansions = GlobExpander.expand(filePattern);
- assertEquals("Different number of expansions", expectedExpansions.length,
- actualExpansions.size());
+ assertEquals(expectedExpansions.length,
+ actualExpansions.size(), "Different number of expansions");
for (int i = 0; i < expectedExpansions.length; i++) {
- assertEquals("Expansion of " + filePattern, expectedExpansions[i],
- actualExpansions.get(i));
+ assertEquals(expectedExpansions[i],
+ actualExpansions.get(i), "Expansion of " + filePattern);
}
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java
index b409a8f929421..27ae520aa9fd4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java
@@ -18,8 +18,9 @@
package org.apache.hadoop.fs;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.re2j.PatternSyntaxException;
/**
@@ -31,8 +32,7 @@ private void assertMatch(boolean yes, String glob, String...input) {
for (String s : input) {
boolean result = pattern.matches(s);
- assertTrue(glob +" should"+ (yes ? "" : " not") +" match "+ s,
- yes ? result : !result);
+ assertTrue(yes ? result : !result, glob +" should"+ (yes ? "" : " not") +" match "+ s);
}
}
@@ -45,7 +45,7 @@ private void shouldThrow(String... globs) {
e.printStackTrace();
continue;
}
- assertTrue("glob "+ glob +" should throw", false);
+ assertTrue(false, "glob "+ glob +" should throw");
}
}
@@ -72,7 +72,8 @@ private void shouldThrow(String... globs) {
shouldThrow("[", "[[]]", "{", "\\");
}
- @Test(timeout=10000) public void testPathologicalPatterns() {
+ @Test @Timeout(value = 10)
+ public void testPathologicalPatterns() {
String badFilename = "job_1429571161900_4222-1430338332599-tda%2D%2D+******************************+++...%270%27%28Stage-1430338580443-39-2000-SUCCEEDED-production%2Dhigh-1430338340360.jhist";
assertMatch(true, badFilename, badFilename);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
index 26d0361d6a255..612954de784db 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
@@ -28,8 +28,7 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.DelegationTokenIssuer;
import org.apache.hadoop.util.Progressable;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -47,6 +46,7 @@
import static org.apache.hadoop.fs.Options.CreateOpts;
import static org.apache.hadoop.fs.Options.Rename;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.fail;
@SuppressWarnings("deprecation")
public class TestHarFileSystem {
@@ -277,7 +277,7 @@ static void checkInvalidPath(String s, Configuration conf) {
final Path p = new Path(s);
try {
p.getFileSystem(conf);
- Assert.fail(p + " is an invalid path.");
+ fail(p + " is an invalid path.");
} catch (IOException e) {
// Expected
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
index eccf491cca8e3..8a2b5fc19230a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
@@ -22,10 +22,9 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
@@ -34,9 +33,11 @@
import java.util.HashSet;
import java.util.Set;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
/**
@@ -125,7 +126,7 @@ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) th
}
}
- @Before
+ @BeforeEach
public void before() throws Exception {
final File rootDirIoFile = new File(rootPath.toUri().getPath());
rootDirIoFile.mkdirs();
@@ -138,7 +139,7 @@ public void before() throws Exception {
harFileSystem = createHarFileSystem(conf);
}
- @After
+ @AfterEach
public void after() throws Exception {
// close Har FS:
final FileSystem harFS = harFileSystem;
@@ -256,11 +257,11 @@ public void testListLocatedStatus() throws Exception {
RemoteIterator fileList = hfs.listLocatedStatus(path);
while (fileList.hasNext()) {
String fileName = fileList.next().getPath().getName();
- assertTrue(fileName + " not in expected files list", expectedFileNames.contains(fileName));
+ assertTrue(expectedFileNames.contains(fileName), fileName + " not in expected files list");
expectedFileNames.remove(fileName);
}
- assertEquals("Didn't find all of the expected file names: " + expectedFileNames,
- 0, expectedFileNames.size());
+ assertEquals(0, expectedFileNames.size(),
+ "Didn't find all of the expected file names: " + expectedFileNames);
}
@Test
@@ -273,10 +274,9 @@ public void testMakeQualifiedPath() throws Exception {
+ harPath.toUri().getPath().toString();
Path path = new Path(harPathWithUserinfo);
Path qualifiedPath = path.getFileSystem(conf).makeQualified(path);
- assertTrue(String.format(
- "The qualified path (%s) did not match the expected path (%s).",
- qualifiedPath.toString(), harPathWithUserinfo),
- qualifiedPath.toString().equals(harPathWithUserinfo));
+ assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo),
+ String.format("The qualified path (%s) did not match the expected path (%s).",
+ qualifiedPath.toString(), harPathWithUserinfo));
}
// ========== Negative:
@@ -291,7 +291,7 @@ public void testNegativeInitWithoutIndex() throws Exception {
final URI uri = new URI("har://" + harPath.toString());
try {
hfs.initialize(uri, new Configuration());
- Assert.fail("Exception expected.");
+ fail("Exception expected.");
} catch (IOException ioe) {
// ok, expected.
}
@@ -302,7 +302,7 @@ public void testNegativeGetHarVersionOnNotInitializedFS() throws Exception {
final HarFileSystem hfs = new HarFileSystem(localFileSystem);
try {
int version = hfs.getHarVersion();
- Assert.fail("Exception expected, but got a Har version " + version + ".");
+ fail("Exception expected, but got a Har version " + version + ".");
} catch (IOException ioe) {
// ok, expected.
}
@@ -326,7 +326,7 @@ public void testNegativeInitWithAnUnsupportedVersion() throws Exception {
final URI uri = new URI("har://" + harPath.toString());
try {
hfs.initialize(uri, new Configuration());
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
@@ -340,28 +340,28 @@ public void testNegativeHarFsModifications() throws Exception {
try {
harFileSystem.create(fooPath, new FsPermission("+rwx"), true, 1024,
(short) 88, 1024, null);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.setReplication(fooPath, (short) 55);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.delete(fooPath, true);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.mkdirs(fooPath, new FsPermission("+rwx"));
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
@@ -369,35 +369,35 @@ public void testNegativeHarFsModifications() throws Exception {
final Path indexPath = new Path(harPath, "_index");
try {
harFileSystem.copyFromLocalFile(false, indexPath, fooPath);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.startLocalOutput(fooPath, indexPath);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.completeLocalOutput(fooPath, indexPath);
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.setOwner(fooPath, "user", "group");
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
try {
harFileSystem.setPermission(fooPath, new FsPermission("+x"));
- Assert.fail("IOException expected.");
+ fail("IOException expected.");
} catch (IOException ioe) {
// ok, expected.
}
@@ -406,7 +406,7 @@ public void testNegativeHarFsModifications() throws Exception {
@Test
public void testHarFsWithoutAuthority() throws Exception {
final URI uri = harFileSystem.getUri();
- Assert.assertNull("har uri authority not null: " + uri, uri.getAuthority());
+ assertNull(uri.getAuthority(), "har uri authority not null: " + uri);
FileContext.getFileContext(uri, conf);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
index 98ae8df891958..97023da62d2bf 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
@@ -25,11 +25,13 @@
import java.util.Arrays;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import static org.junit.Assert.*;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import static org.apache.hadoop.fs.HardLink.*;
@@ -85,7 +87,7 @@ public class TestHardLink {
* Assure clean environment for start of testing
* @throws IOException
*/
- @BeforeClass
+ @BeforeAll
public static void setupClean() {
//delete source and target directories if they exist
FileUtil.fullyDelete(src);
@@ -100,7 +102,7 @@ public static void setupClean() {
/**
* Initialize clean environment for start of each test
*/
- @Before
+ @BeforeEach
public void setupDirs() throws IOException {
//check that we start out with empty top-level test data directory
assertFalse(src.exists());
@@ -176,7 +178,7 @@ private void validateTgtMult() throws IOException {
assertTrue(fetchFileContents(x3_mult).equals(str3));
}
- @After
+ @AfterEach
public void tearDown() throws IOException {
setupClean();
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
index dce3b956d47ef..0a4dff0fbc2f3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
@@ -25,9 +25,11 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
-import static org.junit.Assert.*;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
import org.slf4j.event.Level;
/**
@@ -74,7 +76,7 @@ protected static void setTestPaths(Path testDir) {
FILE3 = new Path(DIR1, "file3");
}
- @BeforeClass
+ @BeforeAll
public static void testSetUp() throws Exception {
fs = FileSystem.getLocal(conf);
fs.delete(TEST_DIR, true);
@@ -160,18 +162,18 @@ public void testDirectory() throws IOException {
itor = fs.listFiles(TEST_DIR, true);
stat = itor.next();
assertTrue(stat.isFile());
- assertTrue("Path " + stat.getPath() + " unexpected",
- filesToFind.remove(stat.getPath()));
+ assertTrue(filesToFind.remove(stat.getPath()),
+ "Path " + stat.getPath() + " unexpected");
stat = itor.next();
assertTrue(stat.isFile());
- assertTrue("Path " + stat.getPath() + " unexpected",
- filesToFind.remove(stat.getPath()));
+ assertTrue(filesToFind.remove(stat.getPath()),
+ "Path " + stat.getPath() + " unexpected");
stat = itor.next();
assertTrue(stat.isFile());
- assertTrue("Path " + stat.getPath() + " unexpected",
- filesToFind.remove(stat.getPath()));
+ assertTrue(filesToFind.remove(stat.getPath()),
+ "Path " + stat.getPath() + " unexpected");
assertFalse(itor.hasNext());
assertTrue(filesToFind.isEmpty());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
index 3693b4f0acde3..eb6d251add0c5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
@@ -30,13 +30,16 @@
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.Shell;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-import org.junit.Test;
+import org.junit.jupiter.api.Timeout;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
/** This test LocalDirAllocator works correctly;
* Every test case uses different buffer dirs to
@@ -45,7 +48,6 @@
* a directory can be created in a read-only directory
* which breaks this test.
*/
-@RunWith(Parameterized.class)
public class TestLocalDirAllocator {
final static private Configuration conf = new Configuration();
final static private String BUFFER_DIR_ROOT = "build/test/temp";
@@ -62,8 +64,8 @@ public class TestLocalDirAllocator {
final static private String RELATIVE = "/RELATIVE";
final static private String ABSOLUTE = "/ABSOLUTE";
final static private String QUALIFIED = "/QUALIFIED";
- final private String ROOT;
- final private String PREFIX;
+ private String root;
+ private String prefix;
static {
try {
@@ -84,12 +86,11 @@ public class TestLocalDirAllocator {
BUFFER_DIR_ROOT).toUri().toString();
}
- public TestLocalDirAllocator(String root, String prefix) {
- ROOT = root;
- PREFIX = prefix;
+ public void initTestLocalDirAllocator(String paramRoot, String paramPrefix) {
+ this.root = paramRoot;
+ this.prefix = paramPrefix;
}
- @Parameters
public static Collection