diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 7a7e0e853e162..1d9fc166c745a 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -381,6 +381,26 @@ <artifactId>lz4-java</artifactId> <scope>provided</scope> + <dependency> + <groupId>org.junit.jupiter</groupId> + <artifactId>junit-jupiter-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.junit.jupiter</groupId> + <artifactId>junit-jupiter-engine</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.junit.jupiter</groupId> + <artifactId>junit-jupiter-params</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.junit.platform</groupId> + <artifactId>junit-platform-launcher</artifactId> + <scope>test</scope> + </dependency> diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java index da429ffe960a4..69b67958c2532 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java @@ -27,7 +27,9 @@ import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public class TestAfsCheckPath { @@ -56,11 +58,13 @@ public void testCheckPathWithTheSameNonDefaultPort() afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); } - @Test(expected=InvalidPathException.class) + @Test public void testCheckPathWithDifferentPorts() throws URISyntaxException { - URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT); - AbstractFileSystem afs = new DummyFileSystem(uri); - afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); + assertThrows(InvalidPathException.class, () -> { + URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT); + AbstractFileSystem afs = new DummyFileSystem(uri); + afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); + }); } private static class DummyFileSystem extends AbstractFileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java index f182fe5da7c36..647144206f122 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java @@ -24,8 +24,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestAvroFSInput { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java index 72e850b1313d5..e0c812cc8fa4a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestBlockLocation { @@ -70,7 +71,8 @@ private static void checkBlockLocation(final BlockLocation loc, /** * Call all the constructors and verify the delegation is working properly */ - @Test(timeout = 5000) + @Test + @Timeout(value = 5) public void testBlockLocationConstructors() throws Exception { // BlockLocation loc; @@ -91,7 +93,8 @@ public void testBlockLocationConstructors() throws Exception { /** * Call each of the setters and verify */ - @Test(timeout = 5000) + @Test + @Timeout(value = 5) public void testBlockLocationSetters() throws Exception { BlockLocation loc; loc = new BlockLocation(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java index 8b42aa6779dad..03494e728937b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java @@ -26,8 +26,14 @@ import static org.apache.hadoop.fs.FileSystemTestHelper.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.*; -import static org.junit.Assert.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestChecksumFileSystem { static final String TEST_ROOT_DIR = @@ -35,7 +41,7 @@ public class TestChecksumFileSystem { static LocalFileSystem localFs; - @Before + @BeforeEach public void resetLocalFs() throws Exception { localFs = FileSystem.getLocal(new Configuration()); localFs.setVerifyChecksum(true); @@ -77,12 +83,12 @@ public void testVerifyChecksum() throws Exception { readFile(localFs, testPath, 1025); localFs.delete(localFs.getChecksumFile(testPath), true); - assertTrue("checksum deleted", !localFs.exists(localFs.getChecksumFile(testPath))); + assertTrue(!localFs.exists(localFs.getChecksumFile(testPath)), "checksum deleted"); //copying the wrong checksum file FileUtil.copy(localFs, localFs.getChecksumFile(testPath11), localFs, localFs.getChecksumFile(testPath),false,true,localFs.getConf()); - assertTrue("checksum exists", localFs.exists(localFs.getChecksumFile(testPath))); + assertTrue(localFs.exists(localFs.getChecksumFile(testPath)), "checksum exists"); boolean errorRead = false; try { @@ -90,12 +96,12 @@ public void testVerifyChecksum() throws Exception { }catch(ChecksumException ie) { errorRead = true; } - assertTrue("error reading", errorRead); + assertTrue(errorRead, "error reading"); //now setting verify false, the read should succeed localFs.setVerifyChecksum(false); String str = readFile(localFs, testPath, 1024).toString(); - assertTrue("read", "testing".equals(str)); + assertTrue("testing".equals(str), "read"); } @Test @@ -153,7 +159,7 @@ public void testTruncatedChecksum() throws Exception { // telling it not to verify checksums, should avoid issue. 
localFs.setVerifyChecksum(false); String str = readFile(localFs, testPath, 1024).toString(); - assertTrue("read", "testing truncation".equals(str)); + assertTrue("testing truncation".equals(str), "read"); } @Test @@ -164,13 +170,11 @@ public void testStreamType() throws Exception { localFs.setVerifyChecksum(true); in = localFs.open(testPath); - assertTrue("stream is input checker", - in.getWrappedStream() instanceof FSInputChecker); + assertTrue(in.getWrappedStream() instanceof FSInputChecker, "stream is input checker"); localFs.setVerifyChecksum(false); in = localFs.open(testPath); - assertFalse("stream is not input checker", - in.getWrappedStream() instanceof FSInputChecker); + assertFalse(in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker"); } @Test @@ -200,7 +204,7 @@ public void testCorruptedChecksum() throws Exception { } catch (ChecksumException ce) { e = ce; } finally { - assertNotNull("got checksum error", e); + assertNotNull(e, "got checksum error"); } localFs.setVerifyChecksum(false); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java index 084c6a0aef83d..aeac1fbad7556 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.fs; - -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.Arrays; @@ -30,8 +29,8 @@ import org.apache.hadoop.fs.shell.CommandFormat.NotEnoughArgumentsException; import org.apache.hadoop.fs.shell.CommandFormat.TooManyArgumentsException; import org.apache.hadoop.fs.shell.CommandFormat.UnknownOptionException; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * This class tests the command line parsing @@ -41,7 +40,7 @@ public class TestCommandFormat { private static List expectedArgs; private static Set expectedOpts; - @Before + @BeforeEach public void setUp() { args = new ArrayList<>(); expectedOpts = new HashSet<>(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java index 98f9f2021f8b4..c911d79e0146a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java @@ -17,14 +17,16 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; public class TestContentSummary { @@ -33,12 +35,12 @@ public class TestContentSummary { @Test public void testConstructorEmpty() { ContentSummary contentSummary = new ContentSummary.Builder().build(); - assertEquals("getLength", 0, contentSummary.getLength()); - assertEquals("getFileCount", 0, 
contentSummary.getFileCount()); - assertEquals("getDirectoryCount", 0, contentSummary.getDirectoryCount()); - assertEquals("getQuota", -1, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", 0, contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota()); + assertEquals(0, contentSummary.getLength(), "getLength"); + assertEquals(0, contentSummary.getFileCount(), "getFileCount"); + assertEquals(0, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(-1, contentSummary.getQuota(), "getQuota"); + assertEquals(0, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the full constructor with quota information @@ -54,14 +56,13 @@ public void testConstructorWithQuota() { ContentSummary contentSummary = new ContentSummary.Builder().length(length). fileCount(fileCount).directoryCount(directoryCount).quota(quota). spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", quota, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(quota, contentSummary.getQuota(), "getQuota"); + assertEquals(spaceConsumed, + contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the constructor with quota information @@ -74,13 +75,13 @@ public void testConstructorNoQuota() { ContentSummary contentSummary = new ContentSummary.Builder().length(length). fileCount(fileCount).directoryCount(directoryCount). 
spaceConsumed(length).build(); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", -1, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", length, contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount, + contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(-1, contentSummary.getQuota(), "getQuota"); + assertEquals(length, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the write method @@ -127,14 +128,12 @@ public void testReadFields() throws IOException { .thenReturn(spaceQuota); contentSummary.readFields(in); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", quota, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(quota, contentSummary.getQuota(), "getQuota"); + assertEquals(spaceConsumed, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the header with quotas diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java index 6b9a34c3b32eb..782a4e6411e50 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java @@ -19,16 +19,16 @@ import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test to make sure df can run and work. 
@@ -37,13 +37,13 @@ public class TestDFCachingGetSpaceUsed { final static private File DF_DIR = GenericTestUtils.getTestDir("testdfspace"); public static final int FILE_SIZE = 1024; - @Before + @BeforeEach public void setUp() { FileUtil.fullyDelete(DF_DIR); assertTrue(DF_DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DF_DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java index 3476f3eef4329..ec6c2d13ca332 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; @@ -29,24 +29,23 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestDFVariations { private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir("testdfvariations").getAbsolutePath(); private static File test_root = null; - @Before + @BeforeEach public void setup() throws IOException { test_root = new File(TEST_ROOT_DIR); test_root.mkdirs(); } - @After + @AfterEach public void after() throws IOException { FileUtil.setWritable(test_root, true); FileUtil.fullyDelete(test_root); @@ -65,25 +64,26 @@ protected String[] getExecString() { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testMount() throws Exception { XXDF df = new XXDF(); String expectedMount = Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar"; - assertEquals("Invalid mount point", - expectedMount, df.getMount()); + assertEquals(expectedMount, df.getMount(), "Invalid mount point"); } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testFileSystem() throws Exception { XXDF df = new XXDF(); String expectedFileSystem = Shell.WINDOWS ? 
df.getDirPath().substring(0, 2) : "/dev/sda3"; - assertEquals("Invalid filesystem", - expectedFileSystem, df.getFilesystem()); + assertEquals(expectedFileSystem, df.getFilesystem(), "Invalid filesystem"); } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testDFInvalidPath() throws Exception { // Generate a path that doesn't exist Random random = new Random(0xDEADBEEFl); @@ -106,7 +106,8 @@ public void testDFInvalidPath() throws Exception { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testDFMalformedOutput() throws Exception { DF df = new DF(new File("/"), 0l); BufferedReader reader = new BufferedReader(new StringReader( @@ -152,19 +153,19 @@ public void testDFMalformedOutput() throws Exception { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testGetMountCurrentDirectory() throws Exception { File currentDirectory = new File("."); String workingDir = currentDirectory.getAbsoluteFile().getCanonicalPath(); DF df = new DF(new File(workingDir), 0L); String mountPath = df.getMount(); File mountDir = new File(mountPath); - assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should exist.", - mountDir.exists()); - assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.", - mountDir.isDirectory()); - assertTrue("Working dir ["+workingDir+"] should start with ["+mountPath+"].", - workingDir.startsWith(mountPath)); + assertTrue(mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist."); + assertTrue(mountDir.isDirectory(), + "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory."); + assertTrue(workingDir.startsWith(mountPath), + "Working dir ["+workingDir+"] should start with ["+mountPath+"]."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index f340cc202ed01..654867972183a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.*; -import static org.junit.Assume.assumeFalse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeFalse; import java.io.File; import java.io.IOException; @@ -37,14 +37,14 @@ public class TestDU { final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp"); - @Before + @BeforeEach public void setUp() { assumeFalse(Shell.WINDOWS); FileUtil.fullyDelete(DU_DIR); assertTrue(DU_DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DU_DIR); } @@ -91,9 +91,8 @@ public void testDU() throws IOException, InterruptedException { long duSize = du.getUsed(); du.close(); - assertTrue("Invalid on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); //test with 0 interval, will not launch thread du = new DU(file, 0, 1, -1); @@ -101,18 +100,16 @@ public void testDU() throws IOException, InterruptedException { duSize = du.getUsed(); du.close(); - assertTrue("Invalid 
on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); //test without launching thread du = new DU(file, 10000, 0, -1); du.init(); duSize = du.getUsed(); - assertTrue("Invalid on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); } @Test @@ -124,7 +121,7 @@ public void testDUGetUsedWillNotReturnNegative() throws IOException { DU du = new DU(file, 10000L, 0, -1); du.incDfsUsed(-Long.MAX_VALUE); long duSize = du.getUsed(); - assertTrue(String.valueOf(duSize), duSize >= 0L); + assertTrue(duSize >= 0L, String.valueOf(duSize)); } @Test @@ -133,14 +130,14 @@ public void testDUSetInitialValue() throws IOException { createFile(file, 8192); DU du = new DU(file, 3000, 0, 1024); du.init(); - assertTrue("Initial usage setting not honored", du.getUsed() == 1024); + assertTrue(du.getUsed() == 1024, "Initial usage setting not honored"); // wait until the first du runs. try { Thread.sleep(5000); } catch (InterruptedException ie) {} - assertTrue("Usage didn't get updated", du.getUsed() == 8192); + assertTrue(du.getUsed() == 8192, "Usage didn't get updated"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java index 9572bed4098f4..1a500ae9b65cb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java @@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.test.LambdaTestUtils.*; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java index 5de32861db68d..28e937f53bd15 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java @@ -21,8 +21,9 @@ import org.apache.commons.net.ftp.FTP; import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestDelegateToFileSystem { @@ -37,7 +38,7 @@ private void testDefaultUriInternal(String defaultUri) FileSystem.setDefaultUri(conf, defaultUri); final AbstractFileSystem ftpFs = AbstractFileSystem.get(FTP_URI_NO_PORT, conf); - Assert.assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); + assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java index 6030c12c16c4d..51638985bc7c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java @@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.Progressable; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * The default port of DelegateToFileSystem is set from child file system. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java index 582bc3142c872..8d11297d7cfd7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java @@ -18,8 +18,17 @@ package org.apache.hadoop.fs; import java.io.IOException; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DelegationTokenRenewer.Renewable; @@ -27,8 +36,9 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Time; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -42,7 +52,7 @@ public abstract class RenewableFileSystem extends FileSystem Configuration conf; FileSystem fs; - @Before + @BeforeEach public void setup() { DelegationTokenRenewer.renewCycle = RENEW_CYCLE; DelegationTokenRenewer.reset(); @@ -69,8 +79,8 @@ public Long answer(InvocationOnMock invocation) { renewer.addRenewAction(fs); - assertEquals("FileSystem not added to DelegationTokenRenewer", 1, - renewer.getRenewQueueLength()); + assertEquals(1, renewer.getRenewQueueLength(), + "FileSystem not added to DelegationTokenRenewer"); Thread.sleep(RENEW_CYCLE*2); verify(token, atLeast(2)).renew(eq(conf)); @@ -82,8 +92,8 @@ public Long answer(InvocationOnMock invocation) { verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken(any()); - assertEquals("FileSystem not removed from DelegationTokenRenewer", 0, - renewer.getRenewQueueLength()); + assertEquals(0, renewer.getRenewQueueLength(), + "FileSystem not removed from DelegationTokenRenewer"); } @Test @@ -179,7 +189,8 @@ public Long answer(InvocationOnMock invocation) { assertEquals(0, renewer.getRenewQueueLength()); } - @Test(timeout=4000) + @Test + @Timeout(value = 4) public void testMultipleTokensDoNotDeadlock() throws IOException, InterruptedException { Configuration conf = mock(Configuration.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java index 60b24c776c14e..5d792713bfd70 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java @@ 
-17,14 +17,14 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.net.URI; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,32 +51,32 @@ public void testConfBasedAndAPIBasedSetUMask() throws Exception { String defaultlUMask = conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY); - assertEquals("Default UMask changed!", "022", defaultlUMask); + assertEquals("022", defaultlUMask, "Default UMask changed!"); URI uri1 = new URI("file://mydfs:50070/"); URI uri2 = new URI("file://tmp"); FileContext fc1 = FileContext.getFileContext(uri1, conf); FileContext fc2 = FileContext.getFileContext(uri2, conf); - assertEquals("Umask for fc1 is incorrect", 022, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 022, fc2.getUMask().toShort()); + assertEquals(022, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(022, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); // Till a user explicitly calls FileContext.setUMask(), the updates through // configuration should be reflected.. conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "011"); - assertEquals("Umask for fc1 is incorrect", 011, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 011, fc2.getUMask().toShort()); + assertEquals(011, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(011, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); // Stop reflecting the conf update for specific FileContexts, once an // explicit setUMask is done. 
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "066"); fc1.setUMask(FsPermission.createImmutable((short) 00033)); - assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 066, fc2.getUMask().toShort()); + assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(066, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077"); fc2.setUMask(FsPermission.createImmutable((short) 00044)); - assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 044, fc2.getUMask().toShort()); + assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(044, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java index 40db1fdda2130..df742f7223d52 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java @@ -20,16 +20,18 @@ import java.io.IOException; import java.util.Set; -import org.junit.Assert; import org.apache.hadoop.util.ShutdownHookManager; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** - * Tests {@link FileContext.#deleteOnExit(Path)} functionality. + * Tests {@link FileContext#deleteOnExit(Path)} functionality. */ public class TestFileContextDeleteOnExit { private static int blockSize = 1024; @@ -38,23 +40,23 @@ public class TestFileContextDeleteOnExit { private final FileContextTestHelper helper = new FileContextTestHelper(); private FileContext fc; - @Before + @BeforeEach public void setup() throws IOException { fc = FileContext.getLocalFSFileContext(); } - @After + @AfterEach public void tearDown() throws IOException { fc.delete(helper.getTestRootPath(fc), true); } private void checkDeleteOnExitData(int size, FileContext fc, Path... paths) { - Assert.assertEquals(size, FileContext.DELETE_ON_EXIT.size()); + assertEquals(size, FileContext.DELETE_ON_EXIT.size()); Set set = FileContext.DELETE_ON_EXIT.get(fc); - Assert.assertEquals(paths.length, (set == null ? 0 : set.size())); + assertEquals(paths.length, (set == null ? 
0 : set.size())); for (Path path : paths) { - Assert.assertTrue(set.contains(path)); + assertTrue(set.contains(path)); } } @@ -67,7 +69,7 @@ public void testDeleteOnExit() throws Exception { checkDeleteOnExitData(1, fc, file1); // Ensure shutdown hook is added - Assert.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); + assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); Path file2 = helper.getTestRootPath(fc, "dir1/file2"); createFile(fc, file2, numBlocks, blockSize); @@ -83,8 +85,8 @@ public void testDeleteOnExit() throws Exception { // paths are cleaned up FileContext.FINALIZER.run(); checkDeleteOnExitData(0, fc, new Path[0]); - Assert.assertFalse(exists(fc, file1)); - Assert.assertFalse(exists(fc, file2)); - Assert.assertFalse(exists(fc, dir)); + assertFalse(exists(fc, file1)); + assertFalse(exists(fc, file2)); + assertFalse(exists(fc, dir)); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java index 2919de20bffd9..7dd9590d944e7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java @@ -24,9 +24,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Tests resolution of AbstractFileSystems for a given path with symlinks. 
@@ -42,12 +44,13 @@ public class TestFileContextResolveAfs { private FileContext fc; private FileSystem localFs; - @Before + @BeforeEach public void setup() throws IOException { fc = FileContext.getFileContext(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFileContextResolveAfs() throws IOException { Configuration conf = new Configuration(); localFs = FileSystem.get(conf); @@ -60,7 +63,7 @@ public void testFileContextResolveAfs() throws IOException { fc.createSymlink(localPath, linkPath, true); Set afsList = fc.resolveAbstractFileSystems(linkPath); - Assert.assertEquals(1, afsList.size()); + assertEquals(1, afsList.size()); localFs.delete(linkPath, true); localFs.delete(localPath, true); localFs.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index 61a688ea4ee8b..3266bb657c4d0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -32,7 +34,7 @@ import java.util.Collections; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -85,8 +87,7 @@ public void testFileStatusWritable() throws Exception { int iterator = 0; for (FileStatus fs : tests) { dest.readFields(in); - assertEquals("Different FileStatuses in iteration " + iterator, - dest, fs); + assertEquals(dest, fs, "Different FileStatuses in iteration " + iterator); iterator++; } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java index 2b8be39193a03..d792a49554b3c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java @@ -18,8 +18,8 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.IOException; import java.net.URI; @@ -29,8 +29,8 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.NetUtilsTestResolver; import org.apache.hadoop.util.Progressable; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class TestFileSystemCanonicalization { static String[] authorities = { @@ -44,7 +44,7 @@ public class TestFileSystemCanonicalization { }; - @BeforeClass + @BeforeAll public static void initialize() throws Exception { NetUtilsTestResolver.install(); } @@ -288,7 +288,7 @@ void verifyCheckPath(FileSystem fs, String path, boolean shouldPass) { } 
assertEquals(pathAuthority, fqPath.toUri().getAuthority()); } else { - assertNotNull("did not fail", e); + assertNotNull(e, "did not fail"); assertEquals("Wrong FS: "+rawPath+", expected: "+fs.getUri(), e.getMessage()); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java index 10ad8a14487ef..c65ba2d7dfa9d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java @@ -27,11 +27,12 @@ import java.net.URL; import java.util.ServiceConfigurationError; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; /** * Tests related to filesystem creation and lifecycle. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java index 5710049afb104..e68a0857723c7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java @@ -21,24 +21,23 @@ import org.apache.commons.lang3.RandomUtils; import org.apache.hadoop.fs.StorageStatistics.LongStatistic; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; -import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; /** * This tests basic operations of {@link FileSystemStorageStatistics} class. 
*/ +@Timeout(10) public class TestFileSystemStorageStatistics { private static final Logger LOG = LoggerFactory.getLogger( TestFileSystemStorageStatistics.class); @@ -62,10 +61,7 @@ public class TestFileSystemStorageStatistics { private FileSystemStorageStatistics storageStatistics = new FileSystemStorageStatistics(FS_STORAGE_STATISTICS_NAME, statistics); - @Rule - public final Timeout globalTimeout = new Timeout(10, TimeUnit.SECONDS); - - @Before + @BeforeEach public void setup() { statistics.incrementBytesRead(RandomUtils.nextInt(0, 100)); statistics.incrementBytesWritten(RandomUtils.nextInt(0, 100)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java index 0372537cb3475..90edf7d4ff5b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java @@ -18,8 +18,15 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; @@ -28,7 +35,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java index 1b42290cedc5e..6ce01fe7176e1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java @@ -18,8 +18,16 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.lang.reflect.Method; @@ -36,8 +44,8 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.util.Progressable; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; public class TestFilterFileSystem { @@ -45,7 +53,7 @@ public class TestFilterFileSystem { private static final Logger LOG = FileSystem.LOG; private 
static final Configuration conf = new Configuration(); - @BeforeClass + @BeforeAll public static void setup() { conf.set("fs.flfs.impl", FilterLocalFileSystem.class.getName()); conf.setBoolean("fs.flfs.impl.disable.cache", true); @@ -179,8 +187,8 @@ public void testFilterFileSystem() throws Exception { } } } - assertTrue((errors + " methods were not overridden correctly - see" + - " log"), errors <= 0); + assertTrue(errors <= 0, (errors + " methods were not overridden correctly - see" + + " log")); } @Test @@ -299,11 +307,8 @@ public void testFilterPathCapabilites() throws Exception { try (FilterFileSystem flfs = new FilterLocalFileSystem()) { flfs.initialize(URI.create("filter:/"), conf); Path src = new Path("/src"); - assertFalse( - "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " - + flfs, - flfs.hasPathCapability(src, - CommonPathCapabilities.FS_MULTIPART_UPLOADER)); + assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER), + "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs); } } @@ -325,7 +330,7 @@ private void checkFsConf(FileSystem fs, Configuration conf, int expectDepth) { int depth = 0; while (true) { depth++; - assertFalse("depth "+depth+">"+expectDepth, depth > expectDepth); + assertFalse(depth > expectDepth, "depth "+depth+">"+expectDepth); assertEquals(conf, fs.getConf()); if (!(fs instanceof FilterFileSystem)) { break; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java index 396924810d98e..77794490744c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java @@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.viewfs.ConfigUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; public class TestFilterFs { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java index 574ed704da277..f3c822a985d29 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.util.DataChecksum; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestFsOptions { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java index 67906d526bc8a..a8020a66183a2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java @@ -22,9 +22,12 @@ import org.apache.hadoop.fs.shell.CommandFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; -import org.assertj.core.api.Assertions; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.Test; + 
+import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TestFsShell { @@ -65,11 +68,11 @@ public void testDFSWithInvalidCommmand() throws Throwable { try (GenericTestUtils.SystemErrCapturer capture = new GenericTestUtils.SystemErrCapturer()) { ToolRunner.run(shell, new String[]{"dfs -mkdirs"}); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .as("FSShell dfs command did not print the error " + "message when invalid command is passed") .contains("-mkdirs: Unknown command"); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .as("FSShell dfs command did not print help " + "message when invalid command is passed") .contains("Usage: hadoop fs [generic options]"); @@ -79,22 +82,22 @@ public void testDFSWithInvalidCommmand() throws Throwable { @Test public void testExceptionNullMessage() throws Exception { final String cmdName = "-cmdExNullMsg"; - final Command cmd = Mockito.mock(Command.class); - Mockito.when(cmd.run(Mockito.any())).thenThrow( + final Command cmd = mock(Command.class); + when(cmd.run(any())).thenThrow( new IllegalArgumentException()); - Mockito.when(cmd.getUsage()).thenReturn(cmdName); + when(cmd.getUsage()).thenReturn(cmdName); - final CommandFactory cmdFactory = Mockito.mock(CommandFactory.class); + final CommandFactory cmdFactory = mock(CommandFactory.class); final String[] names = {cmdName}; - Mockito.when(cmdFactory.getNames()).thenReturn(names); - Mockito.when(cmdFactory.getInstance(cmdName)).thenReturn(cmd); + when(cmdFactory.getNames()).thenReturn(names); + when(cmdFactory.getInstance(cmdName)).thenReturn(cmd); FsShell shell = new FsShell(new Configuration()); shell.commandFactory = cmdFactory; try (GenericTestUtils.SystemErrCapturer capture = new GenericTestUtils.SystemErrCapturer()) { ToolRunner.run(shell, new String[]{cmdName}); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .contains(cmdName + ": Null exception message"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java index 7556bc75fb27a..319ae0e2d8a5b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java @@ -20,10 +20,10 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; @@ -34,9 +34,9 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,7 +48,7 @@ public class TestFsShellCopy { static LocalFileSystem lfs; static Path testRootDir, srcPath, dstPath; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = new Configuration(); shell = new FsShell(conf); @@ -62,7 +62,7 @@ public static void setup() throws Exception { dstPath = new Path(testRootDir, "dstFile"); } - @Before + @BeforeEach public void prepFiles() throws Exception { lfs.setVerifyChecksum(true); lfs.setWriteChecksum(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java index 05ad5c23e6542..c2a3a1c1efdc9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java @@ -19,11 +19,12 @@ package org.apache.hadoop.fs; import org.apache.hadoop.conf.Configuration; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; /** * Test FsShell -ls command. @@ -34,7 +35,7 @@ public class TestFsShellList { private static LocalFileSystem lfs; private static Path testRootDir; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = new Configuration(); shell = new FsShell(conf); @@ -47,7 +48,7 @@ public static void setup() throws Exception { assertThat(lfs.mkdirs(testRootDir)).isTrue(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { lfs.delete(testRootDir, true); } @@ -78,14 +79,15 @@ public void testList() throws Exception { /* UGI params should take effect when we pass. 
*/ - @Test(expected = IllegalArgumentException.class) + @Test public void testListWithUGI() throws Exception { - FsShell fsShell = new FsShell(new Configuration()); - //Passing Dummy such that it should through IAE - fsShell.getConf() - .set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, - "DUMMYAUTH"); - String[] lsArgv = new String[] {"-ls", testRootDir.toString()}; - fsShell.run(lsArgv); + assertThrows(IllegalArgumentException.class, () -> { + FsShell fsShell = new FsShell(new Configuration()); + //Passing Dummy such that it should through IAE + fsShell.getConf().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "DUMMYAUTH"); + String[] lsArgv = new String[]{"-ls", testRootDir.toString()}; + fsShell.run(lsArgv); + }); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 77b2f445a48de..34a6d254940ce 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -19,9 +19,9 @@ package org.apache.hadoop.fs; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -42,8 +42,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,7 +60,7 @@ public class TestFsShellReturnCode { private static FileSystem fileSys; private static FsShell fsShell; - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf.setClass("fs.file.impl", LocalFileSystemExtn.class, LocalFileSystem.class); fileSys = FileSystem.get(conf); @@ -105,14 +106,10 @@ private void change(int exit, String owner, String group, String...files) FileStatus[] stats = fileSys.globStatus(new Path(files[i])); if (stats != null) { for (int j=0; j < stats.length; j++) { - assertEquals("check owner of " + files[i], - ((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()), - stats[j].getOwner() - ); - assertEquals("check group of " + files[i], - ((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()), - stats[j].getGroup() - ); + assertEquals(((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()), + stats[j].getOwner(), "check owner of " + files[i]); + assertEquals(((group != null) ? 
"STUB-"+group : oldStats[i][j].getGroup()), + stats[j].getGroup(), "check group of " + files[i]); } } } @@ -127,7 +124,8 @@ private void change(int exit, String owner, String group, String...files) * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChmod() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists"); @@ -183,7 +181,8 @@ public void testChmod() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChown() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists"); @@ -239,7 +238,8 @@ public void testChown() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChgrp() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists"); @@ -284,7 +284,8 @@ public void testChgrp() throws Exception { change(1, null, "admin", f2, f7); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole() throws Exception { Configuration conf = new Configuration(); @@ -303,20 +304,22 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole() args[0] = "-get"; args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString(); args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString(); - assertTrue("file exists", !fileSys.exists(new Path(args[1]))); - assertTrue("file exists", !fileSys.exists(new Path(args[2]))); + assertTrue(!fileSys.exists(new Path(args[1])), "file exists"); + assertTrue(!fileSys.exists(new Path(args[2])), "file exists"); int run = shell.run(args); results = bytes.toString(); - assertEquals("Return code should be 1", 1, run); - assertTrue(" Null is coming when source path is invalid. ",!results.contains("get: null")); - assertTrue(" Not displaying the intended message ",results.contains("get: `"+args[1]+"': No such file or directory")); + assertEquals(1, run, "Return code should be 1"); + assertTrue(!results.contains("get: null"), " Null is coming when source path is invalid. 
"); + assertTrue(results.contains("get: `" + args[1] + "': No such file or directory"), + " Not displaying the intended message "); } finally { IOUtils.closeStream(out); System.setErr(oldErr); } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRmWithNonexistentGlob() throws Exception { Configuration conf = new Configuration(); FsShell shell = new FsShell(); @@ -337,7 +340,8 @@ public void testRmWithNonexistentGlob() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRmForceWithNonexistentGlob() throws Exception { Configuration conf = new Configuration(); FsShell shell = new FsShell(); @@ -356,7 +360,8 @@ public void testRmForceWithNonexistentGlob() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testInvalidDefaultFS() throws Exception { // if default fs doesn't exist or is invalid, but the path provided in // arguments is valid - fsshell should work @@ -379,7 +384,7 @@ public void testInvalidDefaultFS() throws Exception { int run = shell.run(args); results = bytes.toString(); LOG.info("result=" + results); - assertTrue("Return code should be 0", run == 0); + assertTrue(run == 0, "Return code should be 0"); } finally { IOUtils.closeStream(out); System.setErr(oldErr); @@ -387,7 +392,8 @@ public void testInvalidDefaultFS() throws Exception { } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testInterrupt() throws Exception { MyFsShell shell = new MyFsShell(); shell.setConf(new Configuration()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java index c2bd5b2133d47..e76ed27bb9e58 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java @@ -24,9 +24,9 @@ import org.apache.hadoop.fs.shell.TouchCommands.Touch; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,7 +39,7 @@ public class TestFsShellTouch { static LocalFileSystem lfs; static Path testRootDir; - @BeforeClass + @BeforeAll public static void setup() throws Exception { Configuration conf = new Configuration(); shell = new FsShell(conf); @@ -51,7 +51,7 @@ public static void setup() throws Exception { lfs.setWorkingDirectory(testRootDir); } - @Before + @BeforeEach public void prepFiles() throws Exception { lfs.setVerifyChecksum(true); lfs.setWriteChecksum(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java index d15c1ac515856..37499d3b1cb10 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java @@ -14,15 +14,16 @@ package org.apache.hadoop.fs; import org.apache.hadoop.conf.Configuration; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import 
org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import java.io.*; import java.net.URL; import java.nio.file.Paths; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * Test case for FsUrlConnection with relativePath and SPACE. */ @@ -43,7 +44,7 @@ public class TestFsUrlConnectionPath { private static final Configuration CONFIGURATION = new Configuration(); - @BeforeClass + @BeforeAll public static void initialize() throws IOException{ write(ABSOLUTE_PATH.substring(5), DATA); write(RELATIVE_PATH.substring(5), DATA); @@ -52,7 +53,7 @@ public static void initialize() throws IOException{ URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory()); } - @AfterClass + @AfterAll public static void cleanup(){ delete(ABSOLUTE_PATH.substring(5)); delete(RELATIVE_PATH.substring(5)); @@ -83,25 +84,25 @@ public static int readStream(String path) throws Exception{ @Test public void testAbsolutePath() throws Exception{ int length = readStream(ABSOLUTE_PATH); - Assert.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testRelativePath() throws Exception{ int length = readStream(RELATIVE_PATH); - Assert.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testAbsolutePathWithSpace() throws Exception{ int length = readStream(ABSOLUTE_PATH_W_ENCODED_SPACE); - Assert.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testRelativePathWithSpace() throws Exception{ int length = readStream(RELATIVE_PATH_W_ENCODED_SPACE); - Assert.assertTrue(length > 1); + assertTrue(length > 1); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java index f43480e78df35..932ace76d2595 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java @@ -22,10 +22,11 @@ import java.util.Comparator; import java.util.Random; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; @@ -42,7 +43,7 @@ public class TestGetFileBlockLocations { private FileSystem fs; private Random random; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); Path rootPath = new Path(TEST_ROOT_DIR); @@ -92,7 +93,7 @@ public int compare(BlockLocation arg0, BlockLocation arg1) { } } - @After + @AfterEach public void tearDown() throws IOException { fs.delete(path, true); fs.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java index d696dbfe40f57..7ef34281982a9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java @@ -19,26 +19,29 @@ import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestGetSpaceUsed { final static private File DIR = GenericTestUtils.getTestDir("TestGetSpaceUsed"); - @Before + @BeforeEach public void setUp() { FileUtil.fullyDelete(DIR); assertTrue(DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java index 9d75ba0160ba7..b18047b771e04 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.util.List; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestGlobExpander { @@ -55,11 +55,11 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException { private void checkExpansion(String filePattern, String... expectedExpansions) throws IOException { List actualExpansions = GlobExpander.expand(filePattern); - assertEquals("Different number of expansions", expectedExpansions.length, - actualExpansions.size()); + assertEquals(expectedExpansions.length, + actualExpansions.size(), "Different number of expansions"); for (int i = 0; i < expectedExpansions.length; i++) { - assertEquals("Expansion of " + filePattern, expectedExpansions[i], - actualExpansions.get(i)); + assertEquals(expectedExpansions[i], + actualExpansions.get(i), "Expansion of " + filePattern); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java index b409a8f929421..27ae520aa9fd4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java @@ -18,8 +18,9 @@ package org.apache.hadoop.fs; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.re2j.PatternSyntaxException; /** @@ -31,8 +32,7 @@ private void assertMatch(boolean yes, String glob, String...input) { for (String s : input) { boolean result = pattern.matches(s); - assertTrue(glob +" should"+ (yes ? "" : " not") +" match "+ s, - yes ? result : !result); + assertTrue(yes ? result : !result, glob +" should"+ (yes ? "" : " not") +" match "+ s); } } @@ -45,7 +45,7 @@ private void shouldThrow(String... 
globs) { e.printStackTrace(); continue; } - assertTrue("glob "+ glob +" should throw", false); + assertTrue(false, "glob "+ glob +" should throw"); } } @@ -72,7 +72,8 @@ private void shouldThrow(String... globs) { shouldThrow("[", "[[]]", "{", "\\"); } - @Test(timeout=10000) public void testPathologicalPatterns() { + @Test @Timeout(value = 10) + public void testPathologicalPatterns() { String badFilename = "job_1429571161900_4222-1430338332599-tda%2D%2D+******************************+++...%270%27%28Stage-1430338580443-39-2000-SUCCEEDED-production%2Dhigh-1430338340360.jhist"; assertMatch(true, badFilename, badFilename); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java index 26d0361d6a255..612954de784db 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java @@ -28,8 +28,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.util.Progressable; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,6 +46,7 @@ import static org.apache.hadoop.fs.Options.CreateOpts; import static org.apache.hadoop.fs.Options.Rename; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.fail; @SuppressWarnings("deprecation") public class TestHarFileSystem { @@ -277,7 +277,7 @@ static void checkInvalidPath(String s, Configuration conf) { final Path p = new Path(s); try { p.getFileSystem(conf); - Assert.fail(p + " is an invalid path."); + fail(p + " is an invalid path."); } catch (IOException e) { // Expected } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java index eccf491cca8e3..8a2b5fc19230a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java @@ -22,10 +22,9 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; @@ -34,9 +33,11 @@ import java.util.HashSet; import java.util.Set; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** @@ -125,7 +126,7 @@ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) th } } - @Before + @BeforeEach public void before() throws Exception { final File rootDirIoFile = new 
File(rootPath.toUri().getPath()); rootDirIoFile.mkdirs(); @@ -138,7 +139,7 @@ public void before() throws Exception { harFileSystem = createHarFileSystem(conf); } - @After + @AfterEach public void after() throws Exception { // close Har FS: final FileSystem harFS = harFileSystem; @@ -256,11 +257,11 @@ public void testListLocatedStatus() throws Exception { RemoteIterator fileList = hfs.listLocatedStatus(path); while (fileList.hasNext()) { String fileName = fileList.next().getPath().getName(); - assertTrue(fileName + " not in expected files list", expectedFileNames.contains(fileName)); + assertTrue(expectedFileNames.contains(fileName), fileName + " not in expected files list"); expectedFileNames.remove(fileName); } - assertEquals("Didn't find all of the expected file names: " + expectedFileNames, - 0, expectedFileNames.size()); + assertEquals(0, expectedFileNames.size(), + "Didn't find all of the expected file names: " + expectedFileNames); } @Test @@ -273,10 +274,9 @@ public void testMakeQualifiedPath() throws Exception { + harPath.toUri().getPath().toString(); Path path = new Path(harPathWithUserinfo); Path qualifiedPath = path.getFileSystem(conf).makeQualified(path); - assertTrue(String.format( - "The qualified path (%s) did not match the expected path (%s).", - qualifiedPath.toString(), harPathWithUserinfo), - qualifiedPath.toString().equals(harPathWithUserinfo)); + assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo), + String.format("The qualified path (%s) did not match the expected path (%s).", + qualifiedPath.toString(), harPathWithUserinfo)); } // ========== Negative: @@ -291,7 +291,7 @@ public void testNegativeInitWithoutIndex() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assert.fail("Exception expected."); + fail("Exception expected."); } catch (IOException ioe) { // ok, expected. } @@ -302,7 +302,7 @@ public void testNegativeGetHarVersionOnNotInitializedFS() throws Exception { final HarFileSystem hfs = new HarFileSystem(localFileSystem); try { int version = hfs.getHarVersion(); - Assert.fail("Exception expected, but got a Har version " + version + "."); + fail("Exception expected, but got a Har version " + version + "."); } catch (IOException ioe) { // ok, expected. } @@ -326,7 +326,7 @@ public void testNegativeInitWithAnUnsupportedVersion() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -340,28 +340,28 @@ public void testNegativeHarFsModifications() throws Exception { try { harFileSystem.create(fooPath, new FsPermission("+rwx"), true, 1024, (short) 88, 1024, null); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setReplication(fooPath, (short) 55); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.delete(fooPath, true); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.mkdirs(fooPath, new FsPermission("+rwx")); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. 
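The negative tests in TestHarFileSystemBasics keep their existing try { ... fail(...) } catch (IOException ioe) structure and only swap org.junit.Assert.fail for the Jupiter fail import. Where a fuller rewrite is wanted, JUnit 5's assertThrows expresses the same intent more compactly and also hands back the thrown exception for message checks. A minimal sketch is below; the parse() helper is a hypothetical stand-in, not a HarFileSystem method.

    // Illustrative sketch only -- assertThrows as an alternative to the
    // try/fail/catch idiom retained in the patch. parse() is a made-up helper.
    import java.io.IOException;

    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class AssertThrowsSketchTest {

      private String parse(String s) throws IOException {
        if (s == null) {
          throw new IOException("null input");
        }
        return s.trim();
      }

      @Test
      public void testNegativeCase() {
        // assertThrows returns the caught exception, so its message can be verified.
        IOException e = assertThrows(IOException.class, () -> parse(null));
        assertTrue(e.getMessage().contains("null input"));
      }
    }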
} @@ -369,35 +369,35 @@ public void testNegativeHarFsModifications() throws Exception { final Path indexPath = new Path(harPath, "_index"); try { harFileSystem.copyFromLocalFile(false, indexPath, fooPath); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.startLocalOutput(fooPath, indexPath); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.completeLocalOutput(fooPath, indexPath); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setOwner(fooPath, "user", "group"); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setPermission(fooPath, new FsPermission("+x")); - Assert.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -406,7 +406,7 @@ public void testNegativeHarFsModifications() throws Exception { @Test public void testHarFsWithoutAuthority() throws Exception { final URI uri = harFileSystem.getUri(); - Assert.assertNull("har uri authority not null: " + uri, uri.getAuthority()); + assertNull(uri.getAuthority(), "har uri authority not null: " + uri); FileContext.getFileContext(uri, conf); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java index 98ae8df891958..97023da62d2bf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java @@ -25,11 +25,13 @@ import java.util.Arrays; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import static org.junit.Assert.*; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.HardLink.*; @@ -85,7 +87,7 @@ public class TestHardLink { * Assure clean environment for start of testing * @throws IOException */ - @BeforeClass + @BeforeAll public static void setupClean() { //delete source and target directories if they exist FileUtil.fullyDelete(src); @@ -100,7 +102,7 @@ public static void setupClean() { /** * Initialize clean environment for start of each test */ - @Before + @BeforeEach public void setupDirs() throws IOException { //check that we start out with empty top-level test data directory assertFalse(src.exists()); @@ -176,7 +178,7 @@ private void validateTgtMult() throws IOException { assertTrue(fetchFileContents(x3_mult).equals(str3)); } - @After + @AfterEach public void tearDown() throws IOException { setupClean(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index dce3b956d47ef..0a4dff0fbc2f3 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -25,9 +25,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import static org.junit.Assert.*; -import org.junit.Test; -import org.junit.BeforeClass; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeAll; import org.slf4j.event.Level; /** @@ -74,7 +76,7 @@ protected static void setTestPaths(Path testDir) { FILE3 = new Path(DIR1, "file3"); } - @BeforeClass + @BeforeAll public static void testSetUp() throws Exception { fs = FileSystem.getLocal(conf); fs.delete(TEST_DIR, true); @@ -160,18 +162,18 @@ public void testDirectory() throws IOException { itor = fs.listFiles(TEST_DIR, true); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); assertFalse(itor.hasNext()); assertTrue(filesToFind.isEmpty()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index 3693b4f0acde3..eb6d251add0c5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -30,13 +30,16 @@ import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.util.Shell; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; -import org.junit.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** This test LocalDirAllocator works correctly; * Every test case uses different buffer dirs to @@ -45,7 +48,6 @@ * a directory can be created in a read-only directory * which breaks this test. 
*/ -@RunWith(Parameterized.class) public class TestLocalDirAllocator { final static private Configuration conf = new Configuration(); final static private String BUFFER_DIR_ROOT = "build/test/temp"; @@ -62,8 +64,8 @@ public class TestLocalDirAllocator { final static private String RELATIVE = "/RELATIVE"; final static private String ABSOLUTE = "/ABSOLUTE"; final static private String QUALIFIED = "/QUALIFIED"; - final private String ROOT; - final private String PREFIX; + private String root; + private String prefix; static { try { @@ -84,12 +86,11 @@ public class TestLocalDirAllocator { BUFFER_DIR_ROOT).toUri().toString(); } - public TestLocalDirAllocator(String root, String prefix) { - ROOT = root; - PREFIX = prefix; + public void initTestLocalDirAllocator(String paramRoot, String paramPrefix) { + this.root = paramRoot; + this.prefix = paramPrefix; } - @Parameters public static Collection params() { Object [][] data = new Object[][] { { BUFFER_DIR_ROOT, RELATIVE }, @@ -107,8 +108,8 @@ private static void rmBufferDirs() throws IOException { private static void validateTempDirCreation(String dir) throws IOException { File result = createTempFile(SMALL_FILE_SIZE); - assertTrue("Checking for " + dir + " in " + result + " - FAILED!", - result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath())); + assertTrue(result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), + "Checking for " + dir + " in " + result + " - FAILED!"); } private static File createTempFile() throws IOException { @@ -122,18 +123,17 @@ private static File createTempFile(long size) throws IOException { } private String buildBufferDir(String dir, int i) { - return dir + PREFIX + i; + return dir + prefix + i; } - /** Two buffer dirs. The first dir does not exist & is on a read-only disk; - * The second dir exists & is RW - * @throws Exception - */ - @Test (timeout = 30000) - public void test0() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void test0(String paramRoot, String paramPrefix) throws Exception { assumeNotWindows(); - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir1))); @@ -151,11 +151,15 @@ public void test0() throws Exception { * The second dir exists & is RW * @throws Exception */ - @Test (timeout = 30000) - public void testROBufferDirAndRWBufferDir() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testROBufferDirAndRWBufferDir(String paramRoot, String paramPrefix) + throws Exception { assumeNotWindows(); - String dir1 = buildBufferDir(ROOT, 1); - String dir2 = buildBufferDir(ROOT, 2); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir1 = buildBufferDir(root, 1); + String dir2 = buildBufferDir(root, 2); try { conf.set(CONTEXT, dir1 + "," + dir2); assertTrue(localFs.mkdirs(new Path(dir2))); @@ -168,14 +172,18 @@ public void testROBufferDirAndRWBufferDir() throws Exception { rmBufferDirs(); } } + /** Two buffer dirs. Both do not exist but on a RW disk. 
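The TestLocalDirAllocator hunks above drop @RunWith(Parameterized.class) and constructor injection: each test now receives its parameters as method arguments via @ParameterizedTest with @MethodSource, an init method replaces the old constructor, and @Timeout(value = 30) replaces @Test(timeout = 30000) (Jupiter's @Timeout defaults to seconds). A compact sketch of that shape, with made-up parameter values and names, is below.

    // Illustrative sketch only -- JUnit 5 parameterized-test shape used by the
    // conversion above. Values and class names are invented for the example.
    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.jupiter.api.Timeout;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class ParameterizedSketchTest {

      private String root;
      private String prefix;

      // Replaces the JUnit 4 constructor that @RunWith(Parameterized.class) used.
      private void init(String paramRoot, String paramPrefix) {
        this.root = paramRoot;
        this.prefix = paramPrefix;
      }

      // Plays the role of the JUnit 4 @Parameters factory method.
      public static Collection<Object[]> params() {
        return Arrays.asList(new Object[][] {
            {"build/test/temp", "/RELATIVE"},
            {"build/test/temp", "/ABSOLUTE"},
        });
      }

      @Timeout(value = 30)                 // was @Test(timeout = 30000); unit is seconds
      @ParameterizedTest
      @MethodSource("params")
      public void testBufferDirName(String paramRoot, String paramPrefix) {
        init(paramRoot, paramPrefix);
        assertTrue((root + prefix + 0).startsWith(root));
      }
    }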
* Check if tmp dirs are allocated in a round-robin */ - @Test (timeout = 30000) - public void testDirsNotExist() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testDirsNotExist(String paramRoot, String paramPrefix) throws Exception { assumeNotWindows(); - String dir2 = buildBufferDir(ROOT, 2); - String dir3 = buildBufferDir(ROOT, 3); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir2 = buildBufferDir(root, 2); + String dir3 = buildBufferDir(root, 3); try { conf.set(CONTEXT, dir2 + "," + dir3); @@ -185,9 +193,9 @@ public void testDirsNotExist() throws Exception { int secondDirIdx = (firstDirIdx == 2) ? 3 : 2; // check if tmp dirs are allocated in a round-robin manner - validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx)); - validateTempDirCreation(buildBufferDir(ROOT, secondDirIdx)); - validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx)); + validateTempDirCreation(buildBufferDir(root, firstDirIdx)); + validateTempDirCreation(buildBufferDir(root, secondDirIdx)); + validateTempDirCreation(buildBufferDir(root, firstDirIdx)); } finally { rmBufferDirs(); } @@ -197,11 +205,14 @@ public void testDirsNotExist() throws Exception { * Later disk1 becomes read-only. * @throws Exception */ - @Test (timeout = 30000) - public void testRWBufferDirBecomesRO() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testRWBufferDirBecomesRO(String paramRoot, String paramPrefix) throws Exception { assumeNotWindows(); - String dir3 = buildBufferDir(ROOT, 3); - String dir4 = buildBufferDir(ROOT, 4); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir3 = buildBufferDir(root, 3); + String dir4 = buildBufferDir(root, 4); try { conf.set(CONTEXT, dir3 + "," + dir4); assertTrue(localFs.mkdirs(new Path(dir3))); @@ -212,7 +223,7 @@ public void testRWBufferDirBecomesRO() throws Exception { // Determine the round-robin sequence int nextDirIdx = (dirAllocator.getCurrentDirectoryIndex() == 0) ? 
3 : 4; - validateTempDirCreation(buildBufferDir(ROOT, nextDirIdx)); + validateTempDirCreation(buildBufferDir(root, nextDirIdx)); // change buffer directory 2 to be read only new File(new Path(dir4).toUri().getPath()).setReadOnly(); @@ -235,11 +246,15 @@ public void testRWBufferDirBecomesRO() throws Exception { * @throws Exception */ static final int TRIALS = 100; - @Test (timeout = 30000) - public void testCreateManyFiles() throws Exception { + + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testCreateManyFiles(String paramRoot, String paramPrefix) throws Exception { assumeNotWindows(); - String dir5 = buildBufferDir(ROOT, 5); - String dir6 = buildBufferDir(ROOT, 6); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir5 = buildBufferDir(root, 5); + String dir6 = buildBufferDir(root, 6); try { conf.set(CONTEXT, dir5 + "," + dir6); @@ -278,14 +293,17 @@ public void testCreateManyFiles() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) - public void testCreateManyFilesRandom() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testCreateManyFilesRandom(String paramRoot, String paramPrefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(paramRoot, paramPrefix); final int numDirs = 5; final int numTries = 100; String[] dirs = new String[numDirs]; for (int d = 0; d < numDirs; ++d) { - dirs[d] = buildBufferDir(ROOT, d); + dirs[d] = buildBufferDir(root, d); } boolean next_dir_not_selected_at_least_once = false; try { @@ -331,10 +349,14 @@ public void testCreateManyFilesRandom() throws Exception { * directory. With checkAccess true, the directory should not be created. * @throws Exception */ - @Test (timeout = 30000) - public void testLocalPathForWriteDirCreation() throws IOException { - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testLocalPathForWriteDirCreation(String paramRoot, String paramPrefix) + throws IOException { + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir1))); @@ -362,8 +384,11 @@ public void testLocalPathForWriteDirCreation() throws IOException { * Test when mapred.local.dir not configured and called * getLocalPathForWrite */ - @Test (timeout = 30000) - public void testShouldNotthrowNPE() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testShouldNotthrowNPE(String paramRoot, String paramPrefix) throws Exception { + initTestLocalDirAllocator(paramRoot, paramPrefix); Configuration conf1 = new Configuration(); try { dirAllocator.getLocalPathForWrite("/test", conf1); @@ -404,10 +429,13 @@ public void testShouldNotthrowNPE() throws Exception { * are mistakenly created from fully qualified path strings. 
* @throws IOException */ - @Test (timeout = 30000) - public void testNoSideEffects() throws IOException { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testNoSideEffects(String paramRoot, String paramPrefix) throws IOException { assumeNotWindows(); - String dir = buildBufferDir(ROOT, 0); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir = buildBufferDir(root, 0); try { conf.set(CONTEXT, dir); File result = dirAllocator.createTmpFileForWrite(FILENAME, -1, conf); @@ -426,10 +454,13 @@ public void testNoSideEffects() throws IOException { * * @throws IOException */ - @Test (timeout = 30000) - public void testGetLocalPathToRead() throws IOException { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathToRead(String paramRoot, String paramPrefix) throws IOException { assumeNotWindows(); - String dir = buildBufferDir(ROOT, 0); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir = buildBufferDir(root, 0); try { conf.set(CONTEXT, dir); assertTrue(localFs.mkdirs(new Path(dir))); @@ -451,12 +482,14 @@ public void testGetLocalPathToRead() throws IOException { * * @throws IOException */ - @Test (timeout = 30000) - public void testGetAllLocalPathsToRead() throws IOException { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testGetAllLocalPathsToRead(String paramRoot, String paramPrefix) throws IOException { assumeNotWindows(); - - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir0))); @@ -478,8 +511,8 @@ public void testGetAllLocalPathsToRead() throws IOException { // test #next() while no element to iterate any more: try { Path p = pathIterable.iterator().next(); - assertFalse("NoSuchElementException must be thrown, but returned ["+p - +"] instead.", true); // exception expected + assertFalse(true, "NoSuchElementException must be thrown, but returned ["+p + +"] instead."); // exception expected } catch (NoSuchElementException nsee) { // okay } @@ -499,9 +532,12 @@ public void testGetAllLocalPathsToRead() throws IOException { } } - @Test (timeout = 30000) - public void testRemoveContext() throws IOException { - String dir = buildBufferDir(ROOT, 0); + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testRemoveContext(String paramRoot, String paramPrefix) throws IOException { + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir = buildBufferDir(root, 0); try { String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs"; conf.set(contextCfgItemName, dir); @@ -521,15 +557,19 @@ public void testRemoveContext() throws IOException { * * @throws Exception */ - @Test(timeout = 30000) - public void testGetLocalPathForWriteForInvalidPaths() throws Exception { + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathForWriteForInvalidPaths(String paramRoot, String paramPrefix) + throws Exception { + initTestLocalDirAllocator(paramRoot, paramPrefix); conf.set(CONTEXT, " "); try { dirAllocator.getLocalPathForWrite("/test", conf); fail("not throwing the exception"); } catch (IOException e) { - assertEquals("Incorrect exception message", - "No space available in any of the local directories.", e.getMessage()); + 
assertEquals("No space available in any of the local directories.", + e.getMessage(), "Incorrect exception message"); } } @@ -538,10 +578,14 @@ public void testGetLocalPathForWriteForInvalidPaths() throws Exception { * * @throws Exception */ - @Test(timeout = 30000) - public void testGetLocalPathForWriteForLessSpace() throws Exception { - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathForWriteForLessSpace(String paramRoot, String paramPrefix) + throws Exception { + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); conf.set(CONTEXT, dir0 + "," + dir1); LambdaTestUtils.intercept(DiskErrorException.class, String.format("Could not find any valid local directory for %s with requested size %s", @@ -552,9 +596,12 @@ public void testGetLocalPathForWriteForLessSpace() throws Exception { /** * Test for HADOOP-18636 LocalDirAllocator cannot recover from directory tree deletion. */ - @Test(timeout = 30000) - public void testDirectoryRecovery() throws Throwable { - String dir0 = buildBufferDir(ROOT, 0); + @Timeout(value = 30) + @MethodSource("params") + @ParameterizedTest + public void testDirectoryRecovery(String paramRoot, String paramPrefix) throws Throwable { + initTestLocalDirAllocator(paramRoot, paramPrefix); + String dir0 = buildBufferDir(root, 0); String subdir = dir0 + "/subdir1/subdir2"; conf.set(CONTEXT, subdir); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 79049d3837134..d240929ff77b4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -40,32 +40,33 @@ import java.util.List; import java.util.Random; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.*; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import javax.annotation.Nonnull; -import static 
org.assertj.core.api.Assertions.assertThat; - /** * This class tests the local file system via the FileSystem abstraction. */ +@Timeout(60) public class TestLocalFileSystem { private static final File base = GenericTestUtils.getTestDir("work-dir/localfs"); @@ -75,19 +76,13 @@ public class TestLocalFileSystem { private Configuration conf; private LocalFileSystem fileSys; - /** - * Set the timeout for every test. - */ - @Rule - public Timeout testTimeout = new Timeout(60, TimeUnit.SECONDS); - private void cleanupFile(FileSystem fs, Path name) throws IOException { assertTrue(fs.exists(name)); fs.delete(name, true); assertTrue(!fs.exists(name)); } - @Before + @BeforeEach public void setup() throws IOException { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -95,7 +90,7 @@ public void setup() throws IOException { fileSys.delete(new Path(TEST_ROOT_DIR), true); } - @After + @AfterEach public void after() throws IOException { FileUtil.setWritable(base, true); FileUtil.fullyDelete(base); @@ -248,9 +243,9 @@ public void testCreateFileAndMkdirs() throws IOException { { //check FileStatus and ContentSummary final FileStatus status = fileSys.getFileStatus(test_file); - Assert.assertEquals(fileSize, status.getLen()); + assertEquals(fileSize, status.getLen()); final ContentSummary summary = fileSys.getContentSummary(test_dir); - Assert.assertEquals(fileSize, summary.getLength()); + assertEquals(fileSize, summary.getLength()); } // creating dir over a file @@ -281,10 +276,9 @@ public void testBasicDelete() throws IOException { assertTrue(fileSys.mkdirs(dir1)); writeFile(fileSys, file1, 1); writeFile(fileSys, file2, 1); - assertFalse("Returned true deleting non-existant path", - fileSys.delete(file3)); - assertTrue("Did not delete file", fileSys.delete(file1)); - assertTrue("Did not delete non-empty dir", fileSys.delete(dir1)); + assertFalse(fileSys.delete(file3), "Returned true deleting non-existant path"); + assertTrue(fileSys.delete(file1), "Did not delete file"); + assertTrue(fileSys.delete(dir1), "Did not delete non-empty dir"); } @Test @@ -318,9 +312,9 @@ public void testListStatusWithColons() throws IOException { File colonFile = new File(TEST_ROOT_DIR, "foo:bar"); colonFile.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR)); - assertEquals("Unexpected number of stats", 1, stats.length); - assertEquals("Bad path from stat", colonFile.getAbsolutePath(), - stats[0].getPath().toUri().getPath()); + assertEquals(1, stats.length, "Unexpected number of stats"); + assertEquals(colonFile.getAbsolutePath(), + stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -333,9 +327,9 @@ public void testListStatusReturnConsistentPathOnWindows() throws IOException { File file = new File(dirNoDriveSpec, "foo"); file.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(dirNoDriveSpec)); - assertEquals("Unexpected number of stats", 1, stats.length); - assertEquals("Bad path from stat", new Path(file.getPath()).toUri().getPath(), - stats[0].getPath().toUri().getPath()); + assertEquals(1, stats.length, "Unexpected number of stats"); + assertEquals(new Path(file.getPath()).toUri().getPath(), + stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -429,8 +423,9 @@ public void testSetTimes() throws Exception { long newAccTime = 23456000; FileStatus status = fileSys.getFileStatus(path); - assertTrue("check we're actually changing something", newModTime != status.getModificationTime()); - assertTrue("check we're 
actually changing something", newAccTime != status.getAccessTime()); + assertTrue(newModTime != status.getModificationTime(), + "check we're actually changing something"); + assertTrue(newAccTime != status.getAccessTime(), "check we're actually changing something"); fileSys.setTimes(path, newModTime, newAccTime); checkTimesStatus(path, newModTime, newAccTime); @@ -606,8 +601,8 @@ public void testStripFragmentFromPath() throws Exception { // Create test file with fragment FileSystemTestHelper.createFile(fs, pathWithFragment); Path resolved = fs.resolvePath(pathWithFragment); - assertEquals("resolvePath did not strip fragment from Path", pathQualified, - resolved); + assertEquals(pathQualified, + resolved, "resolvePath did not strip fragment from Path"); } @Test @@ -683,8 +678,8 @@ public void testFSOutputStreamBuilder() throws Exception { new byte[(int) (fileSys.getFileStatus(path).getLen())]; input.readFully(0, buffer); input.close(); - Assert.assertArrayEquals("The data be read should equals with the " - + "data written.", contentOrigin, buffer); + assertArrayEquals(contentOrigin, buffer, "The data be read should equals with the " + + "data written."); } catch (IOException e) { throw e; } @@ -770,8 +765,8 @@ public void testFSOutputStreamBuilderOptions() throws Exception { builder.must("strM", "value"); builder.must("unsupported", 12.34); - assertEquals("Optional value should be overwrite by a mandatory value", - "value", builder.getOptions().get("strM")); + assertEquals("value", builder.getOptions().get("strM"), + "Optional value should be overwrite by a mandatory value"); Set mandatoryKeys = builder.getMandatoryKeys(); Set expectedKeys = new HashSet<>(); @@ -799,8 +794,8 @@ protected Statistics getFileStatistics() { .stream() .filter(s -> s.getScheme().equals("file")) .collect(Collectors.toList()); - assertEquals("Number of statistics counters for file://", - 1, fileStats.size()); + assertEquals(1, fileStats.size(), + "Number of statistics counters for file://"); // this should be used for local and rawLocal, as they share the // same schema (although their class is different) return fileStats.get(0); @@ -832,8 +827,8 @@ private void assertWritesCRC(String operation, Path path, final long bytesOut0 = stats.getBytesWritten(); try { callable.call(); - assertEquals("Bytes written in " + operation + "; stats=" + stats, - CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0); + assertEquals(CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, + "Bytes written in " + operation + "; stats=" + stats); } finally { if (delete) { // clean up @@ -862,8 +857,8 @@ public void testCRCwithClassicAPIs() throws Throwable { final long bytesRead0 = stats.getBytesRead(); fileSys.open(file).close(); final long bytesRead1 = stats.getBytesRead(); - assertEquals("Bytes read in open() call with stats " + stats, - CRC_SIZE, bytesRead1 - bytesRead0); + assertEquals(CRC_SIZE, bytesRead1 - bytesRead0, + "Bytes read in open() call with stats " + stats); } /** @@ -974,8 +969,8 @@ public void testReadIncludesCRCwithBuilders() throws Throwable { // now read back the data, again with the builder API final long bytesRead0 = stats.getBytesRead(); fileSys.openFile(file).build().get().close(); - assertEquals("Bytes read in openFile() call with stats " + stats, - CRC_SIZE, stats.getBytesRead() - bytesRead0); + assertEquals(CRC_SIZE, stats.getBytesRead() - bytesRead0, + "Bytes read in openFile() call with stats " + stats); // now write with overwrite = true assertWritesCRC("createFileNonRecursive()", file, diff 
--git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java index 8e48035d7bd85..0c20167289e23 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java @@ -21,8 +21,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; @@ -33,7 +32,10 @@ import java.util.StringTokenizer; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * This class tests the local file system via the FileSystem abstraction. @@ -234,9 +236,9 @@ public void testSetUmaskInRealTime() throws Exception { try { assertTrue(localfs.mkdirs(dir)); FsPermission initialPermission = getPermission(localfs, dir); - assertEquals( - "With umask 022 permission should be 755 since the default " + - "permission is 777", new FsPermission("755"), initialPermission); + assertEquals(new FsPermission("755"), + initialPermission, "With umask 022 permission should be 755 since the default " + + "permission is 777"); // Modify umask and create a new directory // and check if new umask is applied @@ -244,12 +246,11 @@ public void testSetUmaskInRealTime() throws Exception { assertTrue(localfs.mkdirs(dir2)); FsPermission finalPermission = localfs.getFileStatus(dir2) .getPermission(); - Assertions.assertThat(new FsPermission("755")).as( + assertThat(new FsPermission("755")).as( "With umask 062 permission should not be 755 since the " + "default permission is 777").isNotEqualTo(finalPermission); - assertEquals( - "With umask 062 we expect 715 since the default permission is 777", - new FsPermission("715"), finalPermission); + assertEquals(new FsPermission("715"), finalPermission, + "With umask 062 we expect 715 since the default permission is 777"); } finally { conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022"); cleanup(localfs, dir); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java index 4490f923e2459..a64d960d994dd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java @@ -19,7 +19,7 @@ package org.apache.hadoop.fs; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java index e3e20020e3242..37a2c93963c5e 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestQuotaUsage { @@ -27,9 +27,9 @@ public class TestQuotaUsage { @Test public void testConstructorEmpty() { QuotaUsage quotaUsage = new QuotaUsage.Builder().build(); - assertEquals("getQuota", -1, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", 0, quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota()); + assertEquals(-1, quotaUsage.getQuota(), "getQuota"); + assertEquals(0, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the full constructor with quota information @@ -43,12 +43,12 @@ public void testConstructorWithQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount).quota(quota). spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals("getFileAndDirectoryCount", fileAndDirCount, - quotaUsage.getFileAndDirectoryCount()); - assertEquals("getQuota", quota, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, quotaUsage.getSpaceQuota()); + assertEquals(fileAndDirCount, + quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(quota, quotaUsage.getQuota(), "getQuota"); + assertEquals(spaceConsumed, + quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the constructor with quota information @@ -59,12 +59,12 @@ public void testConstructorNoQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount). 
spaceConsumed(spaceConsumed).build(); - assertEquals("getFileAndDirectoryCount", fileAndDirCount, - quotaUsage.getFileAndDirectoryCount()); - assertEquals("getQuota", -1, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota()); + assertEquals(fileAndDirCount, + quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(-1, quotaUsage.getQuota(), "getQuota"); + assertEquals(spaceConsumed, + quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the header diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index 30c9a31fda4ea..89d7419f763d2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -34,15 +34,15 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.TrashPolicyDefault.Emptier; @@ -60,7 +60,7 @@ public class TestTrash { private final static Path TEST_DIR = new Path(BASE_PATH.getAbsolutePath()); - @Before + @BeforeEach public void setUp() throws IOException { // ensure each test initiates a FileSystem instance, // avoid getting an old instance from cache. @@ -78,7 +78,7 @@ protected static Path mkdir(FileSystem fs, Path p) throws IOException { protected static void checkTrash(FileSystem trashFs, Path trashRoot, Path path) throws IOException { Path p = Path.mergePaths(trashRoot, path); - assertTrue("Could not find file in trash: "+ p , trashFs.exists(p)); + assertTrue(trashFs.exists(p), "Could not find file in trash: " + p); } // counts how many instances of the file are in the Trash @@ -169,7 +169,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Expunge should return zero", 0, val); + assertEquals(0, val, "Expunge should return zero"); } // Verify that we succeed in removing the file we created. 
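The bulk of the mechanical change in the TestQuotaUsage and TestTrash hunks is moving the failure message from the first argument (org.junit.Assert) to the last argument (org.junit.jupiter.api.Assertions). A small sketch contrasting the two forms is below; the JUnit 4 form is shown only in comments, and the values are invented for the example.

    // Illustrative sketch only -- message argument order in Jupiter assertions.
    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class MessageOrderSketchTest {

      @Test
      public void testMessageIsLastArgument() {
        int val = 0;
        boolean exists = true;

        // JUnit 4: assertEquals("Expunge should return zero", 0, val);
        assertEquals(0, val, "Expunge should return zero");

        // JUnit 4: assertTrue("Could not find file in trash", exists);
        assertTrue(exists, "Could not find file in trash");
      }
    }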
@@ -181,7 +181,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); checkTrash(trashRootFs, trashRoot, fs.makeQualified(myFile)); } @@ -197,7 +197,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); } // Verify that we can recreate the file @@ -212,7 +212,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); } // recreate directory @@ -226,7 +226,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); } // Check that we can delete a file from the trash @@ -237,7 +237,7 @@ public static void trashShell(final Configuration conf, final Path base, val = shell.run(new String[] {"-rm", toErase.toString()}); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); checkNotInTrash(trashRootFs, trashRoot, toErase.toString()); checkNotInTrash(trashRootFs, trashRoot, toErase.toString()+".1"); } @@ -249,7 +249,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Expunge should return zero", 0, val); + assertEquals(0, val, "Expunge should return zero"); } // verify that after expunging the Trash, it really goes away @@ -268,7 +268,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); checkTrash(trashRootFs, trashRoot, myFile); args = new String[2]; @@ -277,7 +277,7 @@ public static void trashShell(final Configuration conf, final Path base, val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); checkTrash(trashRootFs, trashRoot, myPath); } @@ -289,7 +289,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return exit code 1", 1, val); + assertEquals(1, val, "Recursive Remove should return exit code 1"); assertTrue(trashRootFs.exists(trashRoot)); } @@ -307,17 +307,15 @@ public static void trashShell(final Configuration conf, final Path base, args[2] = myFile.toString(); int val = -1; // Clear out trash - assertEquals("-expunge failed", - 0, shell.run(new String[] {"-expunge" })); + assertEquals(0, shell.run(new String[] {"-expunge" }), "-expunge failed"); val = shell.run(args); - assertFalse("Expected TrashRoot (" + trashRoot + + assertFalse(trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + ") to exist in file system:" - + trashRootFs.getUri(), - trashRootFs.exists(trashRoot)); // No new Current should be created + + trashRootFs.getUri()); // No new Current should be created assertFalse(fs.exists(myFile)); - assertEquals("Remove with skipTrash should return zero", 0, val); + 
assertEquals(0, val, "Remove with skipTrash should return zero"); } // recreate directory and file @@ -340,15 +338,14 @@ public static void trashShell(final Configuration conf, final Path base, assertFalse(trashRootFs.exists(trashRoot)); // No new Current should be created assertFalse(fs.exists(myPath)); assertFalse(fs.exists(myFile)); - assertEquals("Remove with skipTrash should return zero", 0, val); + assertEquals(0, val, "Remove with skipTrash should return zero"); } // deleting same file multiple times { int val = -1; mkdir(fs, myPath); - assertEquals("Expunge should return zero", - 0, shell.run(new String[] {"-expunge" })); + assertEquals(0, shell.run(new String[] {"-expunge" }), "Expunge should return zero"); // create a file in that directory. @@ -363,7 +360,7 @@ public static void trashShell(final Configuration conf, final Path base, // delete file val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); } // current trash directory Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()), @@ -377,7 +374,7 @@ public static void trashShell(final Configuration conf, final Path base, int count = countSameDeletedFiles(fs, trashDir, myFile); System.out.println("counted " + count + " files " + myFile.getName() + "* in " + trashDir); - assertEquals("Count should have returned 10", num_runs, count); + assertEquals(num_runs, count, "Count should have returned 10"); } //Verify skipTrash option is suggested when rm fails due to its absence @@ -397,11 +394,10 @@ public static void trashShell(final Configuration conf, final Path base, String output = byteStream.toString(); System.setOut(stdout); System.setErr(stderr); - assertTrue("skipTrash wasn't suggested as remedy to failed rm command" + - " or we deleted / even though we could not get server defaults", - output.indexOf("Consider using -skipTrash option") != -1 || - output.indexOf("Failed to determine server " - + "trash configuration") != -1); + assertTrue(output.indexOf("Consider using -skipTrash option") != -1 || + output.indexOf("Failed to determine server " + "trash configuration") != -1, + "skipTrash wasn't suggested as remedy to failed rm command" + + " or we deleted / even though we could not get server defaults"); } // Verify old checkpoint format is recognized @@ -423,11 +419,10 @@ public static void trashShell(final Configuration conf, final Path base, int rc = -1; rc = shell.run(new String[] {"-expunge" }); - assertEquals("Expunge should return zero", 0, rc); - assertFalse("old checkpoint format not recognized", - trashRootFs.exists(dirToDelete)); - assertTrue("old checkpoint format directory should not be removed", - trashRootFs.exists(dirToKeep)); + assertEquals(0, rc, "Expunge should return zero"); + assertFalse(trashRootFs.exists(dirToDelete), "old checkpoint format not recognized"); + assertTrue(trashRootFs.exists(dirToKeep), + "old checkpoint format directory should not be removed"); } // Verify expunge -immediate removes all checkpoints and current folder @@ -451,15 +446,12 @@ public static void trashShell(final Configuration conf, final Path base, int rc = -1; rc = shell.run(new String[] {"-expunge", "-immediate"}); - assertEquals("Expunge immediate should return zero", 0, rc); - assertFalse("Old checkpoint should be removed", - trashRootFs.exists(oldCheckpoint)); - assertFalse("Recent checkpoint should be removed", - trashRootFs.exists(recentCheckpoint)); - assertFalse("Current folder should be removed", - 
trashRootFs.exists(currentFolder)); - assertEquals("Ensure trash folder is empty", 0, - trashRootFs.listStatus(trashRoot.getParent()).length); + assertEquals(0, rc, "Expunge immediate should return zero"); + assertFalse(trashRootFs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse(trashRootFs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse(trashRootFs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0, trashRootFs.listStatus(trashRoot.getParent()).length, + "Ensure trash folder is empty"); } } @@ -510,16 +502,12 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", "testlfs:/"}; int val = testlfsshell.run(args); - assertEquals("Expunge immediate with filesystem should return zero", - 0, val); - assertFalse("Old checkpoint should be removed", - testlfs.exists(oldCheckpoint)); - assertFalse("Recent checkpoint should be removed", - testlfs.exists(recentCheckpoint)); - assertFalse("Current folder should be removed", - testlfs.exists(currentFolder)); - assertEquals("Ensure trash folder is empty", 0, - testlfs.listStatus(trashRoot.getParent()).length); + assertEquals(0, val, "Expunge immediate with filesystem should return zero"); + assertFalse(testlfs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse(testlfs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse(testlfs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0, + testlfs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); // Incorrect FileSystem scheme String incorrectFS = "incorrectfs:/"; @@ -527,17 +515,15 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", incorrectFS}; val = testlfsshell.run(args); - assertEquals("Expunge immediate should return exit code 1 when " - + "incorrect Filesystem is passed", - 1, val); + assertEquals(1, val, "Expunge immediate should return exit code 1 when " + + "incorrect Filesystem is passed"); // Empty FileSystem scheme args = new String[]{"-expunge", "-immediate", "-fs", ""}; val = testlfsshell.run(args); - assertNotEquals("Expunge immediate should fail when filesystem is NULL", - 0, val); + assertNotEquals(0, val, "Expunge immediate should fail when filesystem is NULL"); FileSystem.removeFileSystemForTesting(testlfsURI, config, testlfs); } } @@ -836,7 +822,7 @@ public Boolean get() { emptierThread.join(); } - @After + @AfterEach public void tearDown() throws IOException { File trashDir = new File(TEST_DIR.toUri().getPath()); if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) { @@ -969,18 +955,14 @@ public static void verifyMoveEmptyDirToTrash(FileSystem fs, Path trashRoot = trash.getCurrentTrashDir(emptyDir); fileSystem.delete(trashRoot, true); // Move to trash should be succeed - assertTrue("Move an empty directory to trash failed", - trash.moveToTrash(emptyDir)); + assertTrue(trash.moveToTrash(emptyDir), "Move an empty directory to trash failed"); // Verify the empty dir is removed - assertFalse("The empty directory still exists on file system", - fileSystem.exists(emptyDir)); + assertFalse(fileSystem.exists(emptyDir), "The empty directory still exists on file system"); emptyDir = fileSystem.makeQualified(emptyDir); Path dirInTrash = Path.mergePaths(trashRoot, emptyDir); - assertTrue("Directory wasn't moved to trash", - fileSystem.exists(dirInTrash)); + assertTrue(fileSystem.exists(dirInTrash), "Directory wasn't moved to trash"); FileStatus[] flist = 
fileSystem.listStatus(dirInTrash); - assertTrue("Directory is not empty", - flist!= null && flist.length == 0); + assertTrue(flist!= null && flist.length == 0, "Directory is not empty"); } } @@ -1029,15 +1011,14 @@ public static void verifyTrashPermission(FileSystem fs, Configuration conf) } Path fileInTrash = Path.mergePaths(trashDir, file); FileStatus fstat = wrapper.getFileStatus(fileInTrash); - assertTrue(String.format("File %s is not moved to trash", - fileInTrash.toString()), - wrapper.exists(fileInTrash)); + assertTrue(wrapper.exists(fileInTrash), String.format("File %s is not moved to trash", + fileInTrash.toString())); // Verify permission not change - assertTrue(String.format("Expected file: %s is %s, but actual is %s", + assertTrue(fstat.getPermission().equals(fsPermission), + String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), - fstat.getPermission().toString()), - fstat.getPermission().equals(fsPermission)); + fstat.getPermission().toString())); } // Verify the trash directory can be removed @@ -1077,11 +1058,9 @@ private void verifyAuditableTrashEmptier(Trash trash, emptierThread.join(); AuditableTrashPolicy at = (AuditableTrashPolicy) trash.getTrashPolicy(); - assertEquals( + assertEquals(expectedNumOfCheckpoints, at.getNumberOfCheckpoints(), String.format("Expected num of checkpoints is %s, but actual is %s", - expectedNumOfCheckpoints, at.getNumberOfCheckpoints()), - expectedNumOfCheckpoints, - at.getNumberOfCheckpoints()); + expectedNumOfCheckpoints, at.getNumberOfCheckpoints())); } catch (InterruptedException e) { // Ignore } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java index 799471b8c0355..56471739d4b15 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java @@ -20,11 +20,11 @@ import java.io.DataOutputStream; import java.io.IOException; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * test for the input truncation bug when mark/reset is used. 
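Note: the FTP and SFTP test changes that follow all apply the same JUnit 4 to JUnit 5 mappings used above: the Timeout @Rule becomes a class-level @Timeout (interpreted as seconds by default), the TestName @Rule becomes an injected TestInfo parameter, @Test(expected=...) becomes assertThrows, and @Before/@After become @BeforeEach/@AfterEach. Below is a minimal sketch of the combined pattern; the class and method names are hypothetical and are not part of this patch.

import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.Timeout;

// Hypothetical class illustrating the migration pattern; not part of the patch.
@Timeout(180)  // class-level timeout in seconds, replacing a Timeout @Rule of 180000 ms
public class ExampleMigratedTest {

  @BeforeEach  // was @Before
  public void setUp() {
    // set up per-test fixtures here
  }

  @AfterEach   // was @After
  public void tearDown() {
    // release per-test fixtures here
  }

  @Test
  public void deleteMissingFileFails(TestInfo testInfo) {
    // An injected TestInfo parameter replaces the JUnit 4 TestName rule.
    String file = testInfo.getDisplayName().toLowerCase();
    // assertThrows replaces @Test(expected = IOException.class).
    assertThrows(IOException.class, () -> {
      throw new IOException("cannot delete " + file);
    });
  }
}

The class-level @Timeout corresponds to the removal of the 180000 ms Timeout rule in TestFTPFileSystem below, and the TestInfo parameter corresponds to the TestName rule removal in TestSFTPFileSystem.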
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java index 618ddf97b5460..1760af6839663 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java @@ -22,7 +22,6 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Comparator; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.util.Preconditions; import org.apache.commons.net.ftp.FTP; @@ -39,29 +38,26 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test basic @{link FTPFileSystem} class methods. Contract tests are in * TestFTPContractXXXX. */ +@Timeout(180) public class TestFTPFileSystem { private FtpTestServer server; private java.nio.file.Path testDir; - @Rule - public Timeout testTimeout = new Timeout(180000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void setUp() throws Exception { testDir = Files.createTempDirectory( GenericTestUtils.getTestDir().toPath(), getClass().getName() @@ -69,7 +65,7 @@ public void setUp() throws Exception { server = new FtpTestServer(testDir).start(); } - @After + @AfterEach @SuppressWarnings("ResultOfMethodCallIgnored") public void tearDown() throws Exception { if (server != null) { @@ -98,7 +94,7 @@ public void testCreateWithWritePermissions() throws Exception { outputStream.write(bytesExpected); } try (FSDataInputStream input = fs.open(new Path("test1.txt"))) { - assertThat(bytesExpected, equalTo(IOUtils.readFullyToByteArray(input))); + assertThat(bytesExpected).isEqualTo(IOUtils.readFullyToByteArray(input)); } } @@ -193,7 +189,7 @@ private void enhancedAssertEquals(FsAction actionA, FsAction actionB){ String errorMessageFormat = "expect FsAction is %s, whereas it is %s now."; String notEqualErrorMessage = String.format(errorMessageFormat, actionA.name(), actionB.name()); - assertEquals(notEqualErrorMessage, actionA, actionB); + assertEquals(actionA, actionB, notEqualErrorMessage); } private FTPFile getFTPFileOf(int access, FsAction action) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java index 4c6cf823a7659..aa27075e8163b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java @@ -25,8 +25,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; @@ -37,7 +37,7 @@ import java.nio.charset.StandardCharsets; import java.util.stream.IntStream; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Testing HttpFileSystem. @@ -45,7 +45,7 @@ public class TestHttpFileSystem { private final Configuration conf = new Configuration(false); - @Before + @BeforeEach public void setUp() { conf.set("fs.http.impl", HttpFileSystem.class.getCanonicalName()); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java index f33da8aa8be65..c8730c4450f11 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java @@ -17,10 +17,12 @@ */ package org.apache.hadoop.fs.permission; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Tests covering basic functionality of the ACL objects. @@ -30,7 +32,7 @@ public class TestAcl { ENTRY7, ENTRY8, ENTRY9, ENTRY10, ENTRY11, ENTRY12, ENTRY13; private static AclStatus STATUS1, STATUS2, STATUS3, STATUS4; - @BeforeClass + @BeforeAll public static void setUp() { // named user AclEntry.Builder aclEntryBuilder = new AclEntry.Builder() diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java index 0c5b415f28279..01c8339a07126 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java @@ -21,8 +21,10 @@ import org.apache.hadoop.conf.Configuration; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.apache.hadoop.fs.permission.FsAction.*; @@ -252,8 +254,8 @@ public void testBadUmasks() { FsPermission.getUMask(conf); fail("Shouldn't have been able to parse bad umask"); } catch(IllegalArgumentException iae) { - assertTrue("Exception should specify parsing error and invalid umask: " - + iae.getMessage(), isCorrectExceptionMessage(iae.getMessage(), b)); + assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), + "Exception should specify parsing error and invalid umask: " + iae.getMessage()); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java index 31cacf786d805..b2a505ff748a4 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java @@ -24,8 +24,8 @@ import org.apache.hadoop.io.DataOutputBuffer; import static org.apache.hadoop.fs.FSProtos.*; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Verify PB serialization of FS data structures. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java index e425c2dea284a..b2b0923ad492d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java @@ -44,18 +44,19 @@ import org.apache.sshd.sftp.server.SftpSubsystemFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestName; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; public class TestSFTPFileSystem { @@ -63,8 +64,6 @@ public class TestSFTPFileSystem { private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir().getAbsolutePath(); - @Rule public TestName name = new TestName(); - private static final String connection = "sftp://user:password@localhost"; private static Path localDir = null; private static FileSystem localFs = null; @@ -102,12 +101,12 @@ public boolean authenticate(String username, String password, port = sshd.getPort(); } - @Before + @BeforeEach public void init() throws Exception { sftpFs = FileSystem.get(URI.create(connection), conf); } - @After + @AfterEach public void cleanUp() throws Exception { if (sftpFs != null) { try { @@ -118,7 +117,7 @@ public void cleanUp() throws Exception { } } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { // skip all tests if running on Windows assumeNotWindows(); @@ -138,7 +137,7 @@ public static void setUp() throws Exception { localFs.mkdirs(localDir); } - @AfterClass + @AfterAll public static void tearDown() { if (localFs != null) { try { @@ -185,8 +184,8 @@ private static final Path touch(FileSystem fs, String filename, byte[] data) * @throws Exception */ @Test - public void testCreateFile() throws Exception { - Path file = touch(sftpFs, name.getMethodName().toLowerCase()); + public void testCreateFile(TestInfo 
testInfo) throws Exception { + Path file = touch(sftpFs, testInfo.getDisplayName().toLowerCase()); assertTrue(localFs.exists(file)); assertTrue(sftpFs.delete(file, false)); assertFalse(localFs.exists(file)); @@ -201,8 +200,8 @@ public void testCreateFile() throws Exception { * @throws Exception */ @Test - public void testFileExists() throws Exception { - Path file = touch(localFs, name.getMethodName().toLowerCase()); + public void testFileExists(TestInfo testInfo) throws Exception { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); assertTrue(sftpFs.exists(file)); assertTrue(localFs.exists(file)); assertTrue(sftpFs.delete(file, false)); @@ -219,9 +218,9 @@ public void testFileExists() throws Exception { * @throws Exception */ @Test - public void testReadFile() throws Exception { + public void testReadFile(TestInfo testInfo) throws Exception { byte[] data = "yaks".getBytes(); - Path file = touch(localFs, name.getMethodName().toLowerCase(), data); + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase(), data); FSDataInputStream is = null; try { is = sftpFs.open(file); @@ -245,9 +244,9 @@ public void testReadFile() throws Exception { * @throws Exception */ @Test - public void testStatFile() throws Exception { + public void testStatFile(TestInfo testInfo) throws Exception { byte[] data = "yaks".getBytes(); - Path file = touch(localFs, name.getMethodName().toLowerCase(), data); + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase(), data); FileStatus lstat = localFs.getFileStatus(file); FileStatus sstat = sftpFs.getFileStatus(file); @@ -268,13 +267,14 @@ public void testStatFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) - public void testDeleteNonEmptyDir() throws Exception { - Path file = touch(localFs, name.getMethodName().toLowerCase()); - sftpFs.delete(localDir, false); - assertThat( - ((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()) - .isEqualTo(1); + @Test + public void testDeleteNonEmptyDir(TestInfo testInfo) throws Exception { + assertThrows(IOException.class, () -> { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); + sftpFs.delete(localDir, false); + assertThat(((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()). 
+ isEqualTo(1); + }); } /** @@ -283,8 +283,8 @@ public void testDeleteNonEmptyDir() throws Exception { * @throws Exception */ @Test - public void testDeleteNonExistFile() throws Exception { - Path file = new Path(localDir, name.getMethodName().toLowerCase()); + public void testDeleteNonExistFile(TestInfo testInfo) throws Exception { + Path file = new Path(localDir, testInfo.getDisplayName().toLowerCase()); assertFalse(sftpFs.delete(file, false)); assertThat( ((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()) @@ -297,10 +297,10 @@ public void testDeleteNonExistFile() throws Exception { * @throws Exception */ @Test - public void testRenameFile() throws Exception { + public void testRenameFile(TestInfo testInfo) throws Exception { byte[] data = "dingos".getBytes(); - Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); - Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); + Path file1 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "2"); assertTrue(sftpFs.rename(file1, file2)); @@ -321,11 +321,13 @@ public void testRenameFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) - public void testRenameNonExistFile() throws Exception { - Path file1 = new Path(localDir, name.getMethodName().toLowerCase() + "1"); - Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); - sftpFs.rename(file1, file2); + @Test + public void testRenameNonExistFile(TestInfo testInfo) throws Exception { + assertThrows(IOException.class, ()->{ + Path file1 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "2"); + sftpFs.rename(file1, file2); + }); } /** @@ -333,16 +335,18 @@ public void testRenameNonExistFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) - public void testRenamingFileOntoExistingFile() throws Exception { - Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); - Path file2 = touch(localFs, name.getMethodName().toLowerCase() + "2"); - sftpFs.rename(file1, file2); + @Test + public void testRenamingFileOntoExistingFile(TestInfo testInfo) throws Exception { + assertThrows(IOException.class, ()->{ + Path file1 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "2"); + sftpFs.rename(file1, file2); + }); } @Test - public void testGetAccessTime() throws IOException { - Path file = touch(localFs, name.getMethodName().toLowerCase()); + public void testGetAccessTime(TestInfo testInfo) throws IOException { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); LocalFileSystem local = (LocalFileSystem)localFs; java.nio.file.Path path = (local).pathToFile(file).toPath(); long accessTime1 = Files.readAttributes(path, BasicFileAttributes.class) @@ -357,8 +361,8 @@ public void testGetAccessTime() throws IOException { } @Test - public void testGetModifyTime() throws IOException { - Path file = touch(localFs, name.getMethodName().toLowerCase() + "1"); + public void testGetModifyTime(TestInfo testInfo) throws IOException { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); java.io.File localFile = ((LocalFileSystem) localFs).pathToFile(file); long modifyTime1 = localFile.lastModified(); // SFTPFileSystem doesn't have milliseconds. 
Excluding it. @@ -371,9 +375,9 @@ public void testGetModifyTime() throws IOException { } @Test - public void testMkDirs() throws IOException { + public void testMkDirs(TestInfo testInfo) throws IOException { Path path = new Path(localDir.toUri().getPath(), - new Path(name.getMethodName(), "subdirectory")); + new Path(testInfo.getDisplayName(), "subdirectory")); sftpFs.mkdirs(path); assertTrue(localFs.exists(path)); assertTrue(localFs.getFileStatus(path).isDirectory()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java index 5637e70f32fa5..4d6ef8f5e8557 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.URI; @@ -42,55 +44,49 @@ import org.apache.hadoop.ipc.RpcNoSuchMethodException; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ToolRunner; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class TestAclCommands { - @Rule - public TemporaryFolder testFolder = new TemporaryFolder(); private String path; private Configuration conf = null; - @Before - public void setup() throws IOException { + @BeforeEach + public void setup(@TempDir java.nio.file.Path testFolder) throws IOException { conf = new Configuration(); - path = testFolder.newFile("file").getPath(); + path = testFolder.resolve("file").toFile().getPath(); } @Test public void testGetfaclValidations() throws Exception { - assertFalse("getfacl should fail without path", - 0 == runCommand(new String[] {"-getfacl"})); - assertFalse("getfacl should fail with extra argument", - 0 == runCommand(new String[] {"-getfacl", path, "extraArg"})); + assertFalse(0 == runCommand(new String[] {"-getfacl"}), "getfacl should fail without path"); + assertFalse(0 == runCommand(new String[] {"-getfacl", path, "extraArg"}), + "getfacl should fail with extra argument"); } @Test public void testSetfaclValidations() throws Exception { - assertFalse("setfacl should fail without options", - 0 == runCommand(new String[] {"-setfacl", path})); - assertFalse("setfacl should fail without options -b, -k, -m, -x or --set", - 0 == runCommand(new String[] {"-setfacl", "-R", path})); - assertFalse("setfacl should fail without path", - 0 == runCommand(new String[] {"-setfacl"})); - assertFalse("setfacl should fail without aclSpec", - 0 == runCommand(new String[] {"-setfacl", "-m", path})); - assertFalse("setfacl should fail with conflicting options", - 0 == runCommand(new String[] {"-setfacl", "-m", path})); - assertFalse("setfacl should fail with extra arguments", - 0 == runCommand(new String[] {"-setfacl", path, "extra"})); - assertFalse("setfacl should fail with extra arguments", - 0 == runCommand(new String[] {"-setfacl", "--set", - "default:user::rwx", path, "extra"})); - assertFalse("setfacl should fail with permissions for 
-x", - 0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", - path})); - assertFalse("setfacl should fail ACL spec missing", - 0 == runCommand(new String[] {"-setfacl", "-m", "", path})); + assertFalse(0 == runCommand(new String[] {"-setfacl", path}), + "setfacl should fail without options"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-R", path}), + "setfacl should fail without options -b, -k, -m, -x or --set"); + assertFalse(0 == runCommand(new String[] {"-setfacl"}), + "setfacl should fail without path"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}), + "setfacl should fail without aclSpec"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}), + "setfacl should fail with conflicting options"); + assertFalse(0 == runCommand(new String[] {"-setfacl", path, "extra"}), + "setfacl should fail with extra arguments"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "--set", + "default:user::rwx", path, "extra"}), "setfacl should fail with extra arguments"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", + path}), "setfacl should fail with permissions for -x"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", "", path}), + "setfacl should fail ACL spec missing"); } @Test @@ -101,9 +97,8 @@ public void testSetfaclValidationsWithoutPermissions() throws Exception { } catch (IllegalArgumentException e) { } assertTrue(parsedList.size() == 0); - assertFalse("setfacl should fail with less arguments", - 0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:", - "/path" })); + assertFalse(0 == runCommand(new String[]{"-setfacl", "-m", "user:user1:", + "/path"}), "setfacl should fail with less arguments"); } @Test @@ -129,7 +124,7 @@ public void testMultipleAclSpecParsing() throws Exception { expectedList.add(user2Acl); expectedList.add(group1Acl); expectedList.add(defaultAcl); - assertEquals("Parsed Acl not correct", expectedList, parsedList); + assertEquals(expectedList, parsedList, "Parsed Acl not correct"); } @Test @@ -160,7 +155,7 @@ public void testMultipleAclSpecParsingWithoutPermissions() throws Exception { expectedList.add(other); expectedList.add(defaultUser); expectedList.add(defaultMask); - assertEquals("Parsed Acl not correct", expectedList, parsedList); + assertEquals(expectedList, parsedList, "Parsed Acl not correct"); } @Test @@ -169,8 +164,8 @@ public void testLsNoRpcForGetAclStatus() throws Exception { conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); conf.setBoolean("stubfs.noRpcForGetAclStatus", true); - assertEquals("ls must succeed even if getAclStatus RPC does not exist.", - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" })); + assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[]{"-ls", "/"}), + "ls must succeed even if getAclStatus RPC does not exist."); } @Test @@ -178,8 +173,8 @@ public void testLsAclsUnsupported() throws Exception { Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); - assertEquals("ls must succeed even if FileSystem does not implement ACLs.", - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" })); + assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[]{"-ls", "/"}), + "ls must succeed even if FileSystem does not implement ACLs."); } public static class 
StubFileSystem extends FileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java index db7fc2488c848..9eda315fafd25 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java @@ -18,11 +18,14 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.hadoop.conf.Configuration; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestCommandFactory { static CommandFactory factory; @@ -31,7 +34,7 @@ public class TestCommandFactory { static void registerCommands(CommandFactory factory) { } - @Before + @BeforeEach public void testSetup() { factory = new CommandFactory(conf); assertNotNull(factory); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java index 9172f85eb9cb7..e9d5786419faa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java @@ -19,8 +19,20 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyBoolean; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.anyLong; +import static org.mockito.Mockito.anyShort; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.io.InputStream; @@ -37,9 +49,9 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.shell.CopyCommands.Put; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.mockito.stubbing.OngoingStubbing; public class TestCopy { @@ -51,7 +63,7 @@ public class TestCopy { static PathData target; static FileStatus fileStat; - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -60,7 +72,7 @@ public static void setup() throws IOException { when(fileStat.isDirectory()).thenReturn(false); } - @Before + @BeforeEach public void resetMock() throws IOException { reset(mockFs); target = new PathData(path.toString(), conf); diff 
--git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java index 757c588104ea1..803f14e7c9561 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java @@ -21,11 +21,11 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -37,7 +37,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.shell.CopyCommands.CopyFromLocal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test for copyFromLocal. @@ -82,7 +83,7 @@ public static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -95,13 +96,13 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); @@ -113,14 +114,16 @@ private void run(CommandWithDestination cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocal() { run(new TestMultiThreadedCopy(1, 0), new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreads(){ int threads = Runtime.getRuntime().availableProcessors() * 2 + 1; run(new TestMultiThreadedCopy(threads, numFiles), @@ -129,7 +132,8 @@ public void testCopyFromLocalWithThreads(){ new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreadWrong(){ run(new TestMultiThreadedCopy(1, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), @@ -150,7 +154,7 @@ private class TestMultiThreadedCopy extends CopyFromLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the correct number of threads are spawned - Assert.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); super.processArguments(args); if (isMultiThreadNecessary(args)) { @@ -159,10 +163,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java index b68be243c956e..411a3f2582db9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java @@ -19,9 +19,10 @@ import java.io.IOException; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -37,8 +38,8 @@ import org.apache.hadoop.fs.shell.CopyCommands.Get; import org.apache.hadoop.fs.shell.CopyCommands.Put; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; public class TestCopyPreserveFlag { private static final int MODIFICATION_TIME = 12345000; @@ -59,7 +60,7 @@ public class TestCopyPreserveFlag { private Path testDir; private Configuration conf; - @Before + @BeforeEach public void initialize() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -86,7 +87,7 @@ public void initialize() throws Exception { fs.setTimes(DIR_FROM, MODIFICATION_TIME, ACCESS_TIME); } - @After + @AfterEach public void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -111,19 +112,22 @@ private void run(CommandWithDestination cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithP() throws Exception { run(new Put(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithoutP() throws Exception { run(new Put(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithPQ() throws Exception { Put put = new Put(); run(put, "-p", "-q", "100", FROM.toString(), TO.toString()); @@ -131,7 +135,8 @@ public void testPutWithPQ() throws Exception { assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithQ() throws Exception { Put put = new Put(); run(put, "-q", "100", FROM.toString(), TO.toString()); @@ -139,7 +144,8 @@ public void testPutWithQ() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithSplCharacter() throws Exception { fs.mkdirs(DIR_FROM_SPL); fs.createNewFile(FROM_SPL); @@ -147,37 +153,43 @@ public void testPutWithSplCharacter() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocal() throws Exception { run(new CopyFromLocal(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreads() throws Exception { run(new CopyFromLocal(), "-t", "10", FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreadsPreserve() throws Exception { run(new CopyFromLocal(), "-p", "-t", "10", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithP() throws Exception { run(new Get(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithoutP() throws Exception { run(new Get(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithPQ() throws Exception { Get get = new Get(); run(get, "-p", "-q", "100", FROM.toString(), TO.toString()); @@ -185,7 +197,8 @@ public void testGetWithPQ() throws Exception { assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithQ() throws Exception { Get get = new Get(); run(get, "-q", "100", FROM.toString(), TO.toString()); @@ -193,37 +206,43 @@ public void testGetWithQ() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithThreads() throws Exception { run(new Get(), "-t", "10", FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithThreadsPreserve() throws Exception { run(new Get(), "-p", "-t", "10", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithP() throws Exception { run(new Cp(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithoutP() throws Exception { run(new Cp(), 
FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDirectoryCpWithP() throws Exception { run(new Cp(), "-p", DIR_FROM.toString(), DIR_TO2.toString()); assertAttributesPreserved(DIR_TO2); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDirectoryCpWithoutP() throws Exception { run(new Cp(), DIR_FROM.toString(), DIR_TO2.toString()); assertAttributesChanged(DIR_TO2); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java index 202b81912c104..2ccbddde331dd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java @@ -21,11 +21,11 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -38,7 +38,8 @@ import org.apache.hadoop.fs.shell.CopyCommands.CopyToLocal; import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestCopyToLocal { @@ -81,7 +82,7 @@ private static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -94,7 +95,7 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -105,13 +106,14 @@ private void run(CopyCommandWithMultiThread cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopy() throws Exception { MultiThreadedCopy copy = new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0); run(copy, new Path(dir, FROM_DIR_NAME).toString(), @@ -119,21 +121,24 @@ public void testCopy() throws Exception { assert copy.getExecutor() == null; } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreads() { run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadWrong() { run(new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadsAndQueueSize() { int queueSize = 256; run(new MultiThreadedCopy(5, queueSize, numFiles), "-t", "5", "-q", @@ -142,7 +147,8 @@ public void testCopyWithThreadsAndQueueSize() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadsAndQueueSizeWrong() { int queueSize = 0; run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", "-q", @@ -151,7 +157,8 @@ public void testCopyWithThreadsAndQueueSizeWrong() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopySingleFile() throws Exception { Path fromDirPath = new Path(dir, FROM_DIR_NAME); Path subFile = new Path(fromDirPath, "file0"); @@ -186,9 +193,9 @@ private static class MultiThreadedCopy extends CopyToLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assert.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assert.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -198,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java index a2af500c30c9b..0e5a104f14e25 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java @@ -17,8 +17,16 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.PrintStream; import java.io.IOException; @@ -35,9 +43,9 @@ import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.shell.CommandFormat.NotEnoughArgumentsException; -import org.junit.Test; -import org.junit.Before; -import org.junit.BeforeClass; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; /** * JUnit test class for {@link org.apache.hadoop.fs.shell.Count} @@ -53,7 +61,7 @@ public class TestCount { private static FileSystem mockFs; private static FileStatus fileStat; - @BeforeClass + @BeforeAll public static void setup() { conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -62,7 +70,7 @@ public static void setup() { when(fileStat.isFile()).thenReturn(true); } - @Before + @BeforeEach public void resetMock() { reset(mockFs); } @@ -436,7 +444,7 @@ public void getCommandName() { Count count = new Count(); String actual = count.getCommandName(); String expected = "count"; - assertEquals("Count.getCommandName", expected, actual); + assertEquals(expected, actual, "Count.getCommandName"); } @Test @@ -444,7 +452,7 @@ public void isDeprecated() { Count count = new Count(); boolean actual = count.isDeprecated(); boolean expected = false; - assertEquals("Count.isDeprecated", expected, actual); + assertEquals(expected, actual, "Count.isDeprecated"); } @Test @@ -452,7 +460,7 @@ public void getReplacementCommand() { Count count = new Count(); String actual = count.getReplacementCommand(); String expected = null; - assertEquals("Count.getReplacementCommand", expected, actual); + assertEquals(expected, actual, "Count.getReplacementCommand"); } @Test @@ -460,7 +468,7 @@ public void getName() { Count count = new Count(); String actual = count.getName(); String expected = "count"; - assertEquals("Count.getName", expected, actual); + assertEquals(expected, actual, "Count.getName"); } @Test @@ -470,7 +478,7 @@ public void getUsage() { String expected = "-count [-q] [-h] [-v] [-t []]" + " [-u] [-x] [-e] [-s] ..."; - assertEquals("Count.getUsage", expected, actual); + assertEquals(expected, actual, "Count.getUsage"); } // check the correct description is returned @@ -504,7 +512,7 @@ public void getDescription() { + "The -e option shows the erasure coding policy." 
+ "The -s option shows snapshot counts."; - assertEquals("Count.getDescription", expected, actual); + assertEquals(expected, actual, "Count.getDescription"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java index 214f1a0686cd9..72a180d1e2ffe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java @@ -21,11 +21,11 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -38,7 +38,8 @@ import org.apache.hadoop.fs.shell.CopyCommands.Cp; import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestCpCommand { @@ -81,7 +82,7 @@ private static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -94,7 +95,7 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -105,13 +106,14 @@ private void run(CopyCommandWithMultiThread cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCp() throws Exception { MultiThreadedCp copy = new MultiThreadedCp(1, DEFAULT_QUEUE_SIZE, 0); run(copy, new Path(dir, FROM_DIR_NAME).toString(), @@ -119,21 +121,24 @@ public void testCp() throws Exception { assert copy.getExecutor() == null; } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreads() { run(new MultiThreadedCp(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadWrong() { run(new MultiThreadedCp(1, DEFAULT_QUEUE_SIZE, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadsAndQueueSize() { int queueSize = 256; run(new MultiThreadedCp(5, queueSize, numFiles), "-t", "5", "-q", @@ -142,7 +147,8 @@ public void testCpWithThreadsAndQueueSize() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadsAndQueueSizeWrong() { int queueSize = 0; run(new MultiThreadedCp(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", "-q", @@ -151,7 +157,8 @@ public void testCpWithThreadsAndQueueSizeWrong() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpSingleFile() throws Exception { Path fromDirPath = new Path(dir, FROM_DIR_NAME); Path subFile = new Path(fromDirPath, "file0"); @@ -186,9 +193,9 @@ private static class MultiThreadedCp extends Cp { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assert.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assert.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -198,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java index 4a4f453d5e801..1d4fc461c1b9f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java @@ -19,9 +19,17 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY; -import 
static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -41,9 +49,9 @@ import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; /** @@ -56,7 +64,7 @@ public class TestLs { private static final Date NOW = new Date(); - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf = new Configuration(); conf.set(FS_DEFAULT_NAME_KEY, "mockfs:///"); @@ -64,7 +72,7 @@ public static void setup() throws IOException { mockFs = mock(FileSystem.class); } - @Before + @BeforeEach public void resetMock() throws IOException, URISyntaxException { reset(mockFs); AclStatus mockAclStatus = mock(AclStatus.class); @@ -1113,7 +1121,7 @@ public void isDeprecated() { Ls ls = new Ls(); boolean actual = ls.isDeprecated(); boolean expected = false; - assertEquals("Ls.isDeprecated", expected, actual); + assertEquals(expected, actual, "Ls.isDeprecated"); } // check there's no replacement command @@ -1122,7 +1130,7 @@ public void getReplacementCommand() { Ls ls = new Ls(); String actual = ls.getReplacementCommand(); String expected = null; - assertEquals("Ls.getReplacementCommand", expected, actual); + assertEquals(expected, actual, "Ls.getReplacementCommand"); } // check the correct name is returned @@ -1131,36 +1139,40 @@ public void getName() { Ls ls = new Ls(); String actual = ls.getName(); String expected = "ls"; - assertEquals("Ls.getName", expected, actual); + assertEquals(expected, actual, "Ls.getName"); } - @Test(expected = UnsupportedOperationException.class) + @Test public void processPathFileDisplayECPolicyWhenUnsupported() throws IOException { - TestFile testFile = new TestFile("testDirectory", "testFile"); - LinkedList pathData = new LinkedList(); - pathData.add(testFile.getPathData()); - Ls ls = new Ls(); - LinkedList options = new LinkedList(); - options.add("-e"); - ls.processOptions(options); - ls.processArguments(pathData); + assertThrows(UnsupportedOperationException.class, ()->{ + TestFile testFile = new TestFile("testDirectory", "testFile"); + LinkedList pathData = new LinkedList(); + pathData.add(testFile.getPathData()); + Ls ls = new Ls(); + LinkedList options = new LinkedList(); + options.add("-e"); + ls.processOptions(options); + ls.processArguments(pathData); + }); } - @Test(expected = UnsupportedOperationException.class) + @Test public void processPathDirDisplayECPolicyWhenUnsupported() throws IOException { - TestFile testFile = new TestFile("testDirectory", "testFile"); - TestFile testDir = new TestFile("", "testDirectory"); - testDir.setIsDir(true); - testDir.addContents(testFile); - LinkedList pathData = new LinkedList(); - 
pathData.add(testDir.getPathData()); - Ls ls = new Ls(); - LinkedList options = new LinkedList(); - options.add("-e"); - ls.processOptions(options); - ls.processArguments(pathData); + assertThrows(UnsupportedOperationException.class, () -> { + TestFile testFile = new TestFile("testDirectory", "testFile"); + TestFile testDir = new TestFile("", "testDirectory"); + testDir.setIsDir(true); + testDir.addContents(testFile); + LinkedList pathData = new LinkedList(); + pathData.add(testDir.getPathData()); + Ls ls = new Ls(); + LinkedList options = new LinkedList(); + options.add("-e"); + ls.processOptions(options); + ls.processArguments(pathData); + }); } // test class representing a file to be listed diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java index b9e87d3dacefe..9b67ad9e6c20f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java @@ -18,9 +18,12 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.net.URI; @@ -33,22 +36,22 @@ import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.PathExistsException; import org.apache.hadoop.fs.shell.CommandFormat.UnknownOptionException; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class TestMove { static Configuration conf; static FileSystem mockFs; - @BeforeClass + @BeforeAll public static void setup() throws IOException, URISyntaxException { mockFs = mock(FileSystem.class); conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); } - @Before + @BeforeEach public void resetMock() throws IOException { reset(mockFs); } @@ -91,14 +94,15 @@ public void testMoveTargetExistsWithoutExplicitRename() throws Exception { cmd.run(cmdargs); // make sure command failed with the proper exception - assertTrue("Rename should have failed with path exists exception", - cmd.error instanceof PathExistsException); + assertTrue(cmd.error instanceof PathExistsException, + "Rename should have failed with path exists exception"); } - @Test(expected = UnknownOptionException.class) + @Test public void testMoveFromLocalDoesNotAllowTOption() { - new MoveCommands.MoveFromLocal().run("-t", "2", - null, null); + assertThrows(UnknownOptionException.class, () -> { + new MoveCommands.MoveFromLocal().run("-t", "2", null, null); + }); } static class MockFileSystem extends FilterFileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java index 130ee5edee768..6d968981cd328 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java @@ -18,9 +18,9 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -32,10 +32,10 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestPathData { private static final String TEST_ROOT_DIR = @@ -44,7 +44,7 @@ public class TestPathData { protected FileSystem fs; protected Path testDir; - @Before + @BeforeEach public void initialize() throws Exception { conf = new Configuration(); fs = FileSystem.getLocal(conf); @@ -64,13 +64,14 @@ public void initialize() throws Exception { fs.create(new Path("d2","f3")); } - @After + @AfterEach public void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testWithDirStringAndConf() throws Exception { String dirString = "d1"; PathData item = new PathData(dirString, conf); @@ -83,7 +84,8 @@ public void testWithDirStringAndConf() throws Exception { checkPathData(dirString, item); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUnqualifiedUriContents() throws Exception { String dirString = "d1"; PathData item = new PathData(dirString, conf); @@ -94,7 +96,8 @@ public void testUnqualifiedUriContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testQualifiedUriContents() throws Exception { String dirString = fs.makeQualified(new Path("d1")).toString(); PathData item = new PathData(dirString, conf); @@ -105,7 +108,8 @@ public void testQualifiedUriContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCwdContents() throws Exception { String dirString = Path.CUR_DIR; PathData item = new PathData(dirString, conf); @@ -116,7 +120,8 @@ public void testCwdContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testToFile() throws Exception { PathData item = new PathData(".", conf); assertEquals(new File(testDir.toString()), item.toFile()); @@ -126,7 +131,8 @@ public void testToFile() throws Exception { assertEquals(new File(testDir + "/d1/f1"), item.toFile()); } - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testToFileRawWindowsPaths() throws Exception { assumeWindows(); @@ -153,7 +159,8 @@ public void testToFileRawWindowsPaths() throws Exception { assertEquals(new File(testDir + "\\foo\\bar"), item.toFile()); } - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testInvalidWindowsPath() throws Exception { assumeWindows(); @@ -171,7 +178,8 @@ public void testInvalidWindowsPath() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testAbsoluteGlob() throws Exception { PathData[] 
items = PathData.expandAsGlob(testDir+"/d1/f1*", conf); assertEquals( @@ -199,7 +207,8 @@ public void testAbsoluteGlob() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRelativeGlob() throws Exception { PathData[] items = PathData.expandAsGlob("d1/f1*", conf); assertEquals( @@ -208,7 +217,8 @@ public void testRelativeGlob() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRelativeGlobBack() throws Exception { fs.setWorkingDirectory(new Path("d1")); PathData[] items = PathData.expandAsGlob("../d2/*", conf); @@ -226,7 +236,7 @@ public void testGlobThrowsExceptionForUnreadableDir() throws Exception { fs.setPermission(obscuredDir, new FsPermission((short)0)); //no access try { PathData.expandAsGlob("foo/*", conf); - Assert.fail("Should throw IOException"); + fail("Should throw IOException"); } catch (IOException ioe) { // expected } finally { @@ -235,7 +245,8 @@ public void testGlobThrowsExceptionForUnreadableDir() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testWithStringAndConfForBuggyPath() throws Exception { String dirString = "file:///tmp"; Path tmpDir = new Path(dirString); @@ -249,13 +260,11 @@ public void testWithStringAndConfForBuggyPath() throws Exception { } public void checkPathData(String dirString, PathData item) throws Exception { - assertEquals("checking fs", fs, item.fs); - assertEquals("checking string", dirString, item.toString()); - assertEquals("checking path", - fs.makeQualified(new Path(item.toString())), item.path - ); - assertTrue("checking exist", item.stat != null); - assertTrue("checking isDir", item.stat.isDirectory()); + assertEquals(fs, item.fs, "checking fs"); + assertEquals(dirString, item.toString(), "checking string"); + assertEquals(fs.makeQualified(new Path(item.toString())), item.path, "checking path"); + assertTrue(item.stat != null, "checking exist"); + assertTrue(item.stat.isDirectory(), "checking isDir"); } /* junit does a lousy job of comparing arrays diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java index d4f000576b066..41ece0a782447 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java @@ -18,15 +18,15 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.ipc.RemoteException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestPathExceptions { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java index bb325b4832c10..49d5368bc3d36 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java @@ -18,7 +18,7 @@ 
package org.apache.hadoop.fs.shell; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java index 31a5a4ee17801..e50f60f41ece4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java @@ -18,12 +18,12 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.LinkedList; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Test class to verify Tail shell command. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java index e8520181a1642..efa24514e1e00 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.File; import java.io.FileOutputStream; @@ -36,7 +37,7 @@ import org.apache.hadoop.test.GenericTestUtils; import org.assertj.core.api.Assertions; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.Timeout; /** @@ -95,34 +96,40 @@ public void testEmptyAvroFile() throws Exception { Assertions.assertThat(output).describedAs("output").isEmpty(); } - @Test(expected = NullPointerException.class) + @Test public void testAvroFileInputStreamNullBuffer() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - try (InputStream is = getInputStream(uri, conf)) { - is.read(null, 0, 10); - } + assertThrows(NullPointerException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(null, 0, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testAvroFileInputStreamNegativePosition() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], -1, 10); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], -1, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testAvroFileInputStreamTooLong() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - 
try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], 0, 11); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], 0, 11); + } + }); } @Test @@ -223,34 +230,40 @@ public void testEmptySequenceFile() throws Exception { Assertions.assertThat(output).describedAs("output").isEmpty(); } - @Test(expected = NullPointerException.class) + @Test public void testSequenceFileInputStreamNullBuffer() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(null, 0, 10); - } + assertThrows(NullPointerException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(null, 0, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testSequenceFileInputStreamNegativePosition() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], -1, 10); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], -1, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testSequenceFileInputStreamTooLong() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], 0, 11); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], 0, 11); + } + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java index af0a2c352d267..522cbf56de1ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -27,9 +27,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestXAttrCommands { private final ByteArrayOutputStream errContent = @@ -37,7 +37,7 @@ public class TestXAttrCommands { private Configuration conf = null; private PrintStream initialStdErr; - @Before + @BeforeEach public void setup() throws IOException { errContent.reset(); initialStdErr = System.err; @@ -45,7 +45,7 @@ public void setup() throws IOException { conf = new Configuration(); } - @After + @AfterEach public void cleanUp() throws Exception { errContent.reset(); System.setErr(initialStdErr); @@ -54,41 +54,41 @@ public void cleanUp() throws Exception { @Test public void testGetfattrValidations() throws Exception { errContent.reset(); - assertFalse("getfattr should fail without path", - 0 == runCommand(new String[] { "-getfattr", "-d"})); + assertFalse(0 == runCommand(new String[]{"-getfattr", "-d"}), + "getfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse("getfattr should fail with extra argument", - 0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"})); + assertFalse(0 == runCommand(new String[]{"-getfattr", "extra", "-d", "/test"}), + "getfattr should fail with extra argument"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse("getfattr should fail without \"-n name\" or \"-d\"", - 0 == runCommand(new String[] { "-getfattr", "/test"})); + assertFalse(0 == runCommand(new String[]{"-getfattr", "/test"}), + "getfattr should fail without \"-n name\" or \"-d\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-d' option")); errContent.reset(); - assertFalse("getfattr should fail with invalid encoding", - 0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"})); + assertFalse(0 == runCommand(new String[]{"-getfattr", "-d", "-e", "aaa", "/test"}), + "getfattr should fail with invalid encoding"); assertTrue(errContent.toString().contains("Invalid/unsupported encoding option specified: aaa")); } @Test public void testSetfattrValidations() throws Exception { errContent.reset(); - assertFalse("setfattr should fail without path", - 0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" })); + assertFalse(0 == runCommand(new String[]{"-setfattr", "-n", "user.a1"}), + "setfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse("setfattr should fail with extra arguments", - 0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"})); + assertFalse(0 == runCommand(new String[]{"-setfattr", "extra", "-n", "user.a1", "/test"}), + "setfattr should fail with extra arguments"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse("setfattr should fail without \"-n name\" or \"-x name\"", - 0 == runCommand(new String[] { "-setfattr", "/test"})); + assertFalse(0 == runCommand(new String[]{"-setfattr", "/test"}), + "setfattr should fail without \"-n name\" or \"-x name\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-x name' option")); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java index 9111062ef00a3..2fec2db028c7b 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java @@ -18,24 +18,23 @@ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Deque; import java.util.LinkedList; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestAnd { - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - // test all expressions passing @Test public void testPass() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java index b03be79b03165..692c6db78663c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java @@ -17,28 +17,31 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Deque; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestFilterExpression { private Expression expr; private FilterExpression test; - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void setup() { expr = mock(Expression.class); test = new FilterExpression(expr) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java index 959dc59a270b8..e1b85356fd06a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java @@ -17,8 +17,19 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.io.PrintStream; @@ -26,7 +37,6 @@ import java.util.Collections; import java.util.LinkedList; import java.util.NoSuchElementException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -39,24 +49,21 @@ import org.apache.hadoop.fs.shell.find.Find; import org.apache.hadoop.fs.shell.find.FindOptions; import org.apache.hadoop.fs.shell.find.Result; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.InOrder; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +@Timeout(10) public class TestFind { - @Rule - public Timeout timeout = new Timeout(10000, TimeUnit.MILLISECONDS); - private static FileSystem mockFs; private static Configuration conf; - @Before + @BeforeEach public void setup() throws IOException { mockFs = MockFileSystem.setup(); conf = mockFs.getConf(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java index f6eafd77b5d2e..bd201abfc3887 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java @@ -17,27 +17,23 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestIname { private FileSystem mockFs; private Name.Iname name; - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java index 8217655b523bb..967caebc81485 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java @@ -17,27 +17,23 @@ */ package org.apache.hadoop.fs.shell.find; -import 
static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestName { private FileSystem mockFs; private Name name; - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java index 5e861fc35f085..932689cbed4fa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java @@ -17,29 +17,27 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import java.io.IOException; import org.apache.hadoop.fs.shell.PathData; import java.io.PrintStream; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestPrint { private FileSystem mockFs; - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java index 94c5c403bec38..df4795bf87802 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java @@ -17,29 +17,26 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import java.io.IOException; import org.apache.hadoop.fs.shell.PathData; import java.io.PrintStream; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestPrint0 { private FileSystem mockFs; - @Rule - public 
Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java index 058a0923a43a5..77d3d2b5e5d9d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.TimeUnit; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java index 5698a08c7e16b..b1e168027ac1f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Random; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,9 +32,9 @@ import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BUFFER_ARRAY; import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BUFFER_DISK; import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BYTEBUFFER; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * UTs to test {@link DataBlocks} functionalities. @@ -86,13 +86,12 @@ private void assertWriteBlock(DataBlocks.DataBlock dataBlock) // Verify DataBlock state is at Writing. dataBlock.verifyState(DataBlocks.DataBlock.DestState.Writing); // Verify that the DataBlock has data written. - assertTrue("Expected Data block to have data", dataBlock.hasData()); + assertTrue(dataBlock.hasData(), "Expected Data block to have data"); // Verify the size of data. - assertEquals("Mismatch in data size in block", ONE_KB, - dataBlock.dataSize()); + assertEquals(ONE_KB, dataBlock.dataSize(), "Mismatch in data size in block"); // Verify that no capacity is left in the data block to write more. - assertFalse("Expected the data block to have no capacity to write 1 byte " - + "of data", dataBlock.hasCapacity(1)); + assertFalse(dataBlock.hasCapacity(1), + "Expected the data block to have no capacity to write 1 byte of data"); } /** @@ -110,8 +109,8 @@ private void assertToByteArray(DataBlocks.DataBlock dataBlock) byte[] bytesWritten = blockUploadData.toByteArray(); // Verify that we can call toByteArray() more than once and gives the // same byte[]. 
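A note on the assertion rewrites in these hunks: JUnit 4's org.junit.Assert takes the failure message as the first argument, while JUnit 5's org.junit.jupiter.api.Assertions takes it as the last argument (or as a Supplier<String> for lazy construction). A minimal sketch of the pattern, using a hypothetical test class that is not part of this patch:

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class MessageOrderExample {
  @Test
  void messageIsTheTrailingArgument() {
    int expected = 1024;
    int actual = 1024;
    // JUnit 4 style: Assert.assertEquals("Mismatch in data size", expected, actual);
    // JUnit 5 style: the message moves to the end...
    assertEquals(expected, actual, "Mismatch in data size");
    // ...and can be supplied lazily so it is only built on failure.
    assertEquals(expected, actual, () -> "Mismatch in data size: " + actual);
  }
}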
- assertEquals("Mismatch in byteArray provided by toByteArray() the second " - + "time", bytesWritten, blockUploadData.toByteArray()); + assertEquals(bytesWritten, blockUploadData.toByteArray(), + "Mismatch in byteArray provided by toByteArray() the second time"); IOUtils.close(blockUploadData); // Verify that after closing blockUploadData, we can't call toByteArray(). LambdaTestUtils.intercept(IllegalStateException.class, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java index ef9613f5af127..767e386626414 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -29,7 +29,7 @@ /** * Unit test of etag operations. */ -public class TestEtagChecksum extends Assert { +public class TestEtagChecksum extends Assertions { private final EtagChecksum empty1 = tag(""); private final EtagChecksum empty2 = tag(""); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java index 20825e312c9e5..8166201f3a711 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java @@ -23,6 +23,12 @@ import java.util.EnumSet; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; @@ -33,10 +39,10 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.viewfs.ChRootedFs; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.Mockito; public class TestChRootedFs { @@ -45,7 +51,7 @@ public class TestChRootedFs { FileContext fcTarget; // Path chrootedTo; - @Before + @BeforeEach public void setUp() throws Exception { // create the test root on local_fs fcTarget = FileContext.getLocalFSFileContext(); @@ -62,7 +68,7 @@ public void setUp() throws Exception { new ChRootedFs(fcTarget.getDefaultFileSystem(), chrootedTo), conf); } - @After + @AfterEach public void tearDown() throws Exception { fcTarget.delete(chrootedTo, true); } @@ -71,11 +77,11 @@ public void tearDown() throws Exception { @Test public void testBasicPaths() { URI uri = fc.getDefaultFileSystem().getUri(); - Assert.assertEquals(chrootedTo.toUri(), uri); - 
Assert.assertEquals(fc.makeQualified( + assertEquals(chrootedTo.toUri(), uri); + assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getWorkingDirectory()); - Assert.assertEquals(fc.makeQualified( + assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getHomeDirectory()); /* @@ -85,13 +91,13 @@ public void testBasicPaths() { * But if we were to fix Path#makeQualified() then the next test should * have been: - Assert.assertEquals( + assertEquals( new Path(chrootedTo + "/foo/bar").makeQualified( FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path( "/foo/bar"))); */ - Assert.assertEquals( + assertEquals( new Path("/foo/bar").makeQualified(FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path("/foo/bar"))); } @@ -109,50 +115,50 @@ public void testCreateDelete() throws IOException { // Create file fileContextTestHelper.createFileNonRecursive(fc, "/foo"); - Assert.assertTrue(isFile(fc, new Path("/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); + assertTrue(isFile(fc, new Path("/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); // Create file with recursive dir fileContextTestHelper.createFile(fc, "/newDir/foo"); - Assert.assertTrue(isFile(fc, new Path("/newDir/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/foo"))); + assertTrue(isFile(fc, new Path("/newDir/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/foo"))); // Delete the created file - Assert.assertTrue(fc.delete(new Path("/newDir/foo"), false)); - Assert.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); + assertTrue(fc.delete(new Path("/newDir/foo"), false)); + assertFalse(exists(fc, new Path("/newDir/foo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/foo"))); // Create file with a 2 component dirs recursively fileContextTestHelper.createFile(fc, "/newDir/newDir2/foo"); - Assert.assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/newDir2/foo"))); // Delete the created file - Assert.assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); - Assert.assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); + assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/newDir2/foo"))); } @Test public void testMkdirDelete() throws IOException { fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX"), FileContext.DEFAULT_PERM, false); - Assert.assertTrue(isDir(fc, new Path("/dirX"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX"))); + assertTrue(isDir(fc, new Path("/dirX"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo, "dirX"))); fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX/dirY"), FileContext.DEFAULT_PERM, false); - Assert.assertTrue(isDir(fc, new Path("/dirX/dirY"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + assertTrue(isDir(fc, new Path("/dirX/dirY"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo, "dirX/dirY"))); // Delete the created dir - Assert.assertTrue(fc.delete(new Path("/dirX/dirY"), false)); - 
Assert.assertFalse(exists(fc, new Path("/dirX/dirY"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + assertTrue(fc.delete(new Path("/dirX/dirY"), false)); + assertFalse(exists(fc, new Path("/dirX/dirY"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "dirX/dirY"))); - Assert.assertTrue(fc.delete(new Path("/dirX"), false)); - Assert.assertFalse(exists(fc, new Path("/dirX"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX"))); + assertTrue(fc.delete(new Path("/dirX"), false)); + assertFalse(exists(fc, new Path("/dirX"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "dirX"))); } @Test @@ -160,23 +166,23 @@ public void testRename() throws IOException { // Rename a file fileContextTestHelper.createFile(fc, "/newDir/foo"); fc.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar")); - Assert.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); - Assert.assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar"))); + assertFalse(exists(fc, new Path("/newDir/foo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/foo"))); + assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc, "/newDir/fooBar"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/fooBar"))); // Rename a dir fc.mkdir(new Path("/newDir/dirFoo"), FileContext.DEFAULT_PERM, false); fc.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar")); - Assert.assertFalse(exists(fc, new Path("/newDir/dirFoo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo"))); - Assert.assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar"))); + assertFalse(exists(fc, new Path("/newDir/dirFoo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/dirFoo"))); + assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc, "/newDir/dirFooBar"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo, "newDir/dirFooBar"))); } - /** + /* * We would have liked renames across file system to fail but * Unfortunately there is not way to distinguish the two file systems * @throws IOException @@ -193,15 +199,15 @@ public void testRenameAcrossFs() throws IOException { public void testList() throws IOException { FileStatus fs = fc.getFileStatus(new Path("/")); - Assert.assertTrue(fs.isDirectory()); + assertTrue(fs.isDirectory()); // should return the full path not the chrooted path - Assert.assertEquals(fs.getPath(), chrootedTo); + assertEquals(fs.getPath(), chrootedTo); // list on Slash FileStatus[] dirPaths = fc.util().listStatus(new Path("/")); - Assert.assertEquals(0, dirPaths.length); + assertEquals(0, dirPaths.length); @@ -213,21 +219,21 @@ public void testList() throws IOException { fc.mkdir(new Path("/dirX/dirXX"), FileContext.DEFAULT_PERM, false); dirPaths = fc.util().listStatus(new Path("/")); - Assert.assertEquals(4, dirPaths.length); + assertEquals(4, dirPaths.length); // Note the the file status paths are the full paths on target fs = fileContextTestHelper.containsPath(fcTarget, "foo", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + assertNotNull(fs); + assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "bar", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + 
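A side note on the timeout conversions that recur in these hunks: JUnit 4's @Test(timeout = ...) is specified in milliseconds, whereas JUnit 5's @Timeout defaults to seconds, which is why timeout = 30000 becomes @Timeout(value = 30). A minimal sketch of the two equivalent forms, on a hypothetical test class that is not part of this patch:

import java.util.concurrent.TimeUnit;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

class TimeoutExample {
  // Equivalent of the old JUnit 4 form @Test(timeout = 30000).
  @Test
  @Timeout(value = 30) // unit defaults to TimeUnit.SECONDS
  void boundedInSeconds() throws InterruptedException {
    Thread.sleep(10);
  }

  // The unit can also be stated explicitly when milliseconds read more naturally.
  @Test
  @Timeout(value = 30_000, unit = TimeUnit.MILLISECONDS)
  void boundedInMilliseconds() throws InterruptedException {
    Thread.sleep(10);
  }
}

The same annotation applied at class level is what replaces the JUnit 4 @Rule-based Timeout in the fs.shell.find tests elsewhere in this patch.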
assertNotNull(fs); + assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "dirX", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + assertNotNull(fs); + assertTrue(fs.isDirectory()); fs = fileContextTestHelper.containsPath(fcTarget, "dirY", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + assertNotNull(fs); + assertTrue(fs.isDirectory()); } @Test @@ -238,13 +244,13 @@ public void testWorkingDirectory() throws Exception { Path workDir = new Path("/testWd"); Path fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path(".")); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path("..")); - Assert.assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); + assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); // cd using a relative path @@ -252,20 +258,20 @@ public void testWorkingDirectory() throws Exception { workDir = new Path("/testWd"); fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); Path relativeDir = new Path("existingDir1"); Path absoluteDir = new Path(workDir,"existingDir1"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); Path fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.setWorkingDirectory(relativeDir); - Assert.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // cd using a absolute path absoluteDir = new Path("/test/existingDir2"); fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assert.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // Now open a file relative to the wd we just set above. 
Path absolutePath = new Path(absoluteDir, "foo"); @@ -274,12 +280,12 @@ public void testWorkingDirectory() throws Exception { // Now mkdir relative to the dir we cd'ed to fc.mkdir(new Path("newDir"), FileContext.DEFAULT_PERM, true); - Assert.assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); + assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); absoluteDir = fileContextTestHelper.getTestRootPath(fc, "nonexistingPath"); try { fc.setWorkingDirectory(absoluteDir); - Assert.fail("cd to non existing dir should have failed"); + fail("cd to non existing dir should have failed"); } catch (Exception e) { // Exception as expected } @@ -289,7 +295,7 @@ public void testWorkingDirectory() throws Exception { absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assert.assertEquals(absoluteDir, fc.getWorkingDirectory()); + assertEquals(absoluteDir, fc.getWorkingDirectory()); } @@ -299,15 +305,17 @@ public void testWorkingDirectory() throws Exception { @Test public void testResolvePath() throws IOException { - Assert.assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); + assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); fileContextTestHelper.createFile(fc, "/foo"); - Assert.assertEquals(new Path(chrootedTo, "foo"), + assertEquals(new Path(chrootedTo, "foo"), fc.getDefaultFileSystem().resolvePath(new Path("/foo"))); } - @Test(expected=FileNotFoundException.class) + @Test public void testResolvePathNonExisting() throws IOException { + assertThrows(FileNotFoundException.class, () -> { fc.getDefaultFileSystem().resolvePath(new Path("/nonExisting")); + }); } @Test @@ -315,7 +323,7 @@ public void testIsValidNameValidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(true).when(baseFs).isValidName(Mockito.anyString()); - Assert.assertTrue(chRootedFs.isValidName("/test")); + assertTrue(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } @@ -324,11 +332,12 @@ public void testIsValidNameInvalidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(false).when(baseFs).isValidName(Mockito.anyString()); - Assert.assertFalse(chRootedFs.isValidName("/test")); + assertFalse(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( @@ -337,12 +346,13 @@ public void testCreateSnapshot() throws Exception { ChRootedFs chRootedFs = new ChRootedFs(baseFs, chrootedTo); Mockito.doReturn(snapRootPath).when(baseFs) .createSnapshot(chRootedSnapRootPath, "snap1"); - Assert.assertEquals(snapRootPath, + assertEquals(snapRootPath, chRootedFs.createSnapshot(snapRootPath, "snap1")); Mockito.verify(baseFs).createSnapshot(chRootedSnapRootPath, "snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testDeleteSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( @@ -355,7 +365,8 @@ public void testDeleteSnapshot() throws Exception { Mockito.verify(baseFs).deleteSnapshot(chRootedSnapRootPath, 
"snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testRenameSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java index c567944ffe307..49bee32ff0bc9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.fs.viewfs; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test Regex Mount Point Interceptor Factory. @@ -34,7 +35,7 @@ public void testCreateNormalCase() { .toString(RegexMountPoint.INTERCEPTOR_INTERNAL_SEP) + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assert.assertTrue( + assertTrue( interceptor instanceof RegexMountPointResolvedDstPathReplaceInterceptor); } @@ -49,6 +50,6 @@ public void testCreateBadCase() { + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assert.assertTrue(interceptor == null); + assertTrue(interceptor == null); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java index 9fdf0f6ac9c5c..6327d8562c6d4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java @@ -19,10 +19,11 @@ import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.viewfs.RegexMountPointInterceptorType.REPLACE_RESOLVED_DST_PATH; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; /** * Test RegexMountPointResolvedDstPathReplaceInterceptor. 
@@ -43,11 +44,11 @@ public void testDeserializeFromStringNormalCase() throws IOException {
     RegexMountPointResolvedDstPathReplaceInterceptor interceptor =
         RegexMountPointResolvedDstPathReplaceInterceptor
         .deserializeFromString(serializedString);
-    Assert.assertEquals(srcRegex, interceptor.getSrcRegexString());
-    Assert.assertEquals(replaceString, interceptor.getReplaceString());
-    Assert.assertNull(interceptor.getSrcRegexPattern());
+    assertEquals(srcRegex, interceptor.getSrcRegexString());
+    assertEquals(replaceString, interceptor.getReplaceString());
+    assertNull(interceptor.getSrcRegexPattern());
     interceptor.initialize();
-    Assert.assertEquals(srcRegex,
+    assertEquals(srcRegex,
         interceptor.getSrcRegexPattern().toString());
   }
@@ -60,7 +61,7 @@ public void testDeserializeFromStringBadCase() throws IOException {
     RegexMountPointResolvedDstPathReplaceInterceptor interceptor =
         RegexMountPointResolvedDstPathReplaceInterceptor
         .deserializeFromString(serializedString);
-    Assert.assertNull(interceptor);
+    assertNull(interceptor);
   }
   @Test
@@ -71,7 +72,7 @@ public void testSerialization() {
     RegexMountPointResolvedDstPathReplaceInterceptor interceptor =
         new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex,
         replaceString);
-    Assert.assertEquals(interceptor.serializeToString(), serializedString);
+    assertEquals(interceptor.serializeToString(), serializedString);
   }
   @Test
@@ -82,7 +83,7 @@ public void testInterceptSource() {
         new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex,
         replaceString);
     String sourcePath = "/a/b/l3/dd";
-    Assert.assertEquals(sourcePath, interceptor.interceptSource(sourcePath));
+    assertEquals(sourcePath, interceptor.interceptSource(sourcePath));
   }
   @Test
@@ -95,7 +96,7 @@ public void testInterceptResolve() throws IOException {
         new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex,
         replaceString);
     interceptor.initialize();
-    Assert.assertEquals("/user-hdfs",
+    assertEquals("/user-hdfs",
         interceptor.interceptResolvedDestPathStr(pathAfterResolution));
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
index 1527e3c1f30d8..eb2b1f3a3fe43 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 /**
  * Test the TestViewFSOverloadSchemeCentralMountTableConfig with mount-table
@@ -36,7 +36,7 @@ public class TestViewFSOverloadSchemeCentralMountTableConfig
   private Path oldMountTablePath;
   private Path latestMountTablepath;
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     super.setUp();
     // Mount table name format: mount-table..xml
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
index 3a60d6ecdda94..613f3440c41de 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java
@@ -31,11 +31,12 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.MockFileSystem;
-import org.junit.*;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
 import static org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.getChildFileSystem;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.verify;
 /**
  * Verify that viewfs propagates certain methods to the underlying fs
@@ -46,7 +47,7 @@ public class TestViewFileSystemDelegation { //extends ViewFileSystemTestSetup {
   static FakeFileSystem fs1;
   static FakeFileSystem fs2;
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws Exception {
     conf = ViewFileSystemTestSetup.createConfig();
     setupFileSystem(new URI("fs1:/"), FakeFileSystem.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java
index 239f47d1da6f3..4d90eabce1891 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.fs.viewfs;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.getChildFileSystem;
 import java.io.IOException;
@@ -34,8 +34,8 @@
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 /**
  * Test ViewFileSystem's support for having delegation tokens fetched and cached
@@ -52,7 +52,7 @@ public class TestViewFileSystemDelegationTokenSupport {
   static FakeFileSystem fs1;
   static FakeFileSystem fs2;
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws Exception {
     conf = ViewFileSystemTestSetup.createConfig();
     setupFileSystem(new URI("fs1:///"), FakeFileSystem.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
index 1e86a91c141c1..f245109b92fae 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
@@ -29,13 +29,18 @@
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
 /**
  *
  * Test the TestViewFileSystemOverloadSchemeLF using a file with authority:
@@ -51,7 +56,7 @@ public class TestViewFileSystemOverloadSchemeLocalFileSystem {
   private FileSystemTestHelper fileSystemTestHelper =
       new FileSystemTestHelper();
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     conf = new Configuration();
     conf.set(String.format("fs.%s.impl", FILE),
@@ -94,7 +99,7 @@ public void testLocalTargetLinkWriteSimple()
     }
     try (FSDataInputStream lViewIs = lViewFs.open(testPath)) {
-      Assert.assertEquals(testString, lViewIs.readUTF());
+      assertEquals(testString, lViewIs.readUTF());
     }
   }
 }
@@ -111,9 +116,9 @@ public void testLocalFsCreateAndDelete() throws Exception {
     try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) {
       Path testPath = new Path(mountURI.toString() + "/lfsroot/test");
       lViewFS.createNewFile(testPath);
-      Assert.assertTrue(lViewFS.exists(testPath));
+      assertTrue(lViewFS.exists(testPath));
       lViewFS.delete(testPath, true);
-      Assert.assertFalse(lViewFS.exists(testPath));
+      assertFalse(lViewFS.exists(testPath));
     }
   }
@@ -131,7 +136,7 @@ public void testLocalFsLinkSlashMerge() throws Exception {
     try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) {
       Path fileOnRoot = new Path(mountURI.toString() + "/NewFile");
       lViewFS.createNewFile(fileOnRoot);
-      Assert.assertTrue(lViewFS.exists(fileOnRoot));
+      assertTrue(lViewFS.exists(fileOnRoot));
     }
   }
@@ -139,18 +144,20 @@ public void testLocalFsLinkSlashMerge() throws Exception {
   * Tests with linkMergeSlash and other mounts in
   * ViewFileSystemOverloadScheme.
   */
-  @Test(expected = IOException.class)
+  @Test
   public void testLocalFsLinkSlashMergeWithOtherMountLinks() throws Exception {
-    LOG.info("Starting testLocalFsLinkSlashMergeWithOtherMountLinks");
-    addMountLinks("mt",
-        new String[] {"/lfsroot", Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH },
-        new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf);
-    final URI mountURI = URI.create("file://mt/");
-    FileSystem.get(mountURI, conf);
-    Assert.fail("A merge slash cannot be configured with other mount links.");
+    assertThrows(IOException.class, ()->{
+      LOG.info("Starting testLocalFsLinkSlashMergeWithOtherMountLinks");
+      addMountLinks("mt",
+          new String[] {"/lfsroot", Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH },
+          new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf);
+      final URI mountURI = URI.create("file://mt/");
+      FileSystem.get(mountURI, conf);
+      fail("A merge slash cannot be configured with other mount links.");
+    });
   }
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (null != fsTarget) {
       fsTarget.delete(fileSystemTestHelper.getTestRootPath(fsTarget), true);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java
index 9d7c58f8197b3..150fed4c80c81 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java
@@ -25,41 +25,45 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
 public class TestViewFsConfig {
-  @Test(expected = FileAlreadyExistsException.class)
+  @Test
   public void testInvalidConfig() throws IOException, URISyntaxException {
-    Configuration conf = new Configuration();
-    ConfigUtil.setIsNestedMountPointSupported(conf, false);
-    ConfigUtil.addLink(conf, "/internalDir/linkToDir2",
-        new Path("file:///dir2").toUri());
-    ConfigUtil.addLink(conf, "/internalDir/linkToDir2/linkToDir3",
-        new Path("file:///dir3").toUri());
+    assertThrows(FileAlreadyExistsException.class, ()-> {
+      Configuration conf = new Configuration();
+      ConfigUtil.setIsNestedMountPointSupported(conf, false);
+      ConfigUtil.addLink(conf, "/internalDir/linkToDir2",
+          new Path("file:///dir2").toUri());
+      ConfigUtil.addLink(conf, "/internalDir/linkToDir2/linkToDir3",
+          new Path("file:///dir3").toUri());
-    class Foo {
-    }
+      class Foo {
+      }
-    new InodeTree(conf, null, null, false) {
+      new InodeTree(conf, null, null, false) {
-      @Override
-      protected Function initAndGetTargetFs() {
-        return null;
-      }
+        @Override
+        protected Function initAndGetTargetFs() {
+          return null;
+        }
-      @Override
-      protected Foo getTargetFileSystem(final INodeDir dir) {
-        return null;
-      }
+        @Override
+        protected Foo getTargetFileSystem(final INodeDir dir) {
+          return null;
+        }
-      @Override
-      protected Foo getTargetFileSystem(final String settings,
-          final URI[] mergeFsURIList) {
-        return null;
-      }
+        @Override
+        protected Foo getTargetFileSystem(final String settings,
+            final URI[] mergeFsURIList) {
+          return null;
+        }
-    };
+      };
+    });
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
index 06cbdab8d210f..f4e57800c7c93 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
@@ -29,12 +29,11 @@
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.TrashPolicyDefault;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;
 import static org.apache.hadoop.fs.viewfs.Constants.*;
-import static org.junit.Assert.*;
 public class TestViewFsTrash {
   FileSystem fsTarget;  // the target file system - the mount will point here
@@ -42,7 +41,7 @@ public class TestViewFsTrash {
   Configuration conf;
   private FileSystemTestHelper fileSystemTestHelper;
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     Configuration targetFSConf = new Configuration();
     targetFSConf.setClass("fs.file.impl", TestTrash.TestLFS.class, FileSystem.class);
@@ -62,7 +61,7 @@ public void setUp() throws Exception {
   }
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     ViewFileSystemTestSetup.tearDown(fileSystemTestHelper, fsTarget);
     fsTarget.delete(new Path(fsTarget.getHomeDirectory(), ".Trash/Current"),
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java
index 6bc014ab8929f..8a6d0a0b9458a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java
@@ -22,7 +22,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FsConstants;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 public class TestViewFsURIs {
   @Test
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
index 8ac447eb02e9b..068fd12952e12 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
@@ -33,13 +33,15 @@
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 /**
  * The FileStatus is being serialized in MR as jobs are submitted.
@@ -51,13 +53,13 @@ public class TestViewfsFileStatus {
   private static final File TEST_DIR = GenericTestUtils.getTestDir(
       TestViewfsFileStatus.class.getSimpleName());
-  @Before
+  @BeforeEach
   public void setUp() {
     FileUtil.fullyDelete(TEST_DIR);
     assertTrue(TEST_DIR.mkdirs());
   }
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     FileUtil.fullyDelete(TEST_DIR);
   }
@@ -83,9 +85,9 @@ public void testFileStatusSerialziation()
     FileStatus stat = vfs.getFileStatus(path);
     assertEquals(content.length, stat.getLen());
     ContractTestUtils.assertNotErasureCoded(vfs, path);
-    assertTrue(path + " should have erasure coding unset in " +
-        "FileStatus#toString(): " + stat,
-        stat.toString().contains("isErasureCoded=false"));
+    assertTrue(stat.toString().contains("isErasureCoded=false"),
+        path + " should have erasure coding unset in " +
+        "FileStatus#toString(): " + stat);
     // check serialization/deserialization
     DataOutputBuffer dob = new DataOutputBuffer();
@@ -180,7 +182,7 @@ public void testGetFileChecksum() throws IOException {
     Mockito.verify(mockFS).getFileChecksum(new Path("someFile"));
   }
-  @AfterClass
+  @AfterAll
   public static void cleanup() throws IOException {
     FileUtil.fullyDelete(TEST_DIR);
   }
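The hunks above all apply the same mechanical JUnit 4 to JUnit 5 conversion: imports move from org.junit to org.junit.jupiter.api, @Before/@After/@BeforeClass/@AfterClass become @BeforeEach/@AfterEach/@BeforeAll/@AfterAll, @Test(expected=...) is replaced by an explicit assertThrows, @Test(timeout=...) in milliseconds becomes a separate @Timeout annotation in seconds, and assertion messages move from the first argument to the last. A minimal sketch of the resulting style, shown on a hypothetical FooTest class that is not part of this patch:

    // Hypothetical example only; it illustrates the JUnit 5 idioms used by the hunks above.
    import java.io.IOException;

    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class FooTest {

      @BeforeEach   // was @Before in JUnit 4
      public void setUp() {
        // per-test setup goes here
      }

      @Test         // was @Test(expected = IOException.class)
      public void testExpectedException() {
        // the expected failure is now asserted explicitly inside the test body
        assertThrows(IOException.class, () -> {
          throw new IOException("expected");
        });
      }

      @Test
      @Timeout(value = 5)   // was @Test(timeout = 5000); @Timeout counts seconds by default
      public void testWithTimeout() {
        // the message is now the last argument (it was first in org.junit.Assert)
        assertTrue(2 + 2 == 4, "expected the assertion message to come last");
      }
    }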