diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupMajorCompactionTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupMajorCompactionTTL.java index 69f230c7d05c..9e542449fb78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupMajorCompactionTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupMajorCompactionTTL.java @@ -17,37 +17,34 @@ */ package org.apache.hadoop.hbase.rsgroup; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.ServerManager; -import org.apache.hadoop.hbase.testclassification.LargeTests; +import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RSGroupTests; import org.apache.hadoop.hbase.util.compaction.TestMajorCompactorTTL; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ RSGroupTests.class, LargeTests.class }) +@Tag(RSGroupTests.TAG) +@Tag(MediumTests.TAG) public class TestRSGroupMajorCompactionTTL extends TestMajorCompactorTTL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestRSGroupMajorCompactionTTL.class); private final static int NUM_SLAVES_BASE = 6; - @Before + @BeforeEach @Override public void setUp() throws Exception { utility = new HBaseTestingUtil(); @@ -70,17 +67,17 @@ public boolean evaluate() { admin = utility.getAdmin(); } - @After + @AfterEach @Override public void tearDown() throws Exception { utility.shutdownMiniCluster(); } @Test - public void testCompactingTables() throws Exception { + public void testCompactingTables(TestInfo testInfo) throws Exception { List tableNames = Lists.newArrayList(); for (int i = 0; i < 10; i++) { - tableNames.add(createTable(name.getMethodName() + "___" + i)); + tableNames.add(createTable(testInfo.getTestMethod().get().getName() + "___" + i)); } // Delay a bit, so we can set the table TTL to 5 seconds diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java index 0b989b8029f0..b02d27b55fd2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -66,7 +66,6 @@ import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; -import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -544,8 +543,7 @@ protected void deleteMetaRegion(Configuration conf, boolean unassign, boolean hd } } - @org.junit.Rule - public TestName name = new TestName(); + protected String testMethodName; 
public static class MasterSyncCoprocessor implements MasterCoprocessor, MasterObserver { volatile CountDownLatch tableCreationLatch = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBloomFilterChunk.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBloomFilterChunk.java index bde869b18bca..4e76a73e3186 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBloomFilterChunk.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBloomFilterChunk.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.nio.MultiByteBuff; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestBloomFilterChunk { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBloomFilterChunk.class); - @Test public void testBasicBloom() throws Exception { BloomFilterChunk bf1 = new BloomFilterChunk(1000, (float) 0.01, Hash.MURMUR_HASH, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java index cf4d9aa94ac4..87709b7fb692 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Comparator; import java.util.PriorityQueue; @@ -29,22 +29,17 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestBoundedPriorityBlockingQueue { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBoundedPriorityBlockingQueue.class); - private final static int CAPACITY = 16; static class TestObject { @@ -77,12 +72,12 @@ public int 
compare(TestObject a, TestObject b) { private BoundedPriorityBlockingQueue queue; - @Before + @BeforeEach public void setUp() throws Exception { this.queue = new BoundedPriorityBlockingQueue<>(CAPACITY, new TestObjectComparator()); } - @After + @AfterEach public void tearDown() throws Exception { } @@ -204,7 +199,7 @@ public void testPoll() { assertEquals(testList.poll(), queue.poll()); } - assertNull(null, queue.poll()); + assertNull(queue.poll()); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java index 21bdd93e8d95..1679c0e8b126 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java @@ -17,26 +17,21 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.MultiByteBuff; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestByteBuffUtils { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestByteBuffUtils.class); - @Test public void testCopyAndCompare() throws Exception { ByteBuffer bb1 = ByteBuffer.allocate(50); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java index fa41bc5ea84a..8c5fb58f664c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java @@ -17,13 +17,16 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedOutputStream; import java.io.DataOutputStream; import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -32,19 +35,15 @@ import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestCompressionTest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCompressionTest.class); - private static final Logger LOG = LoggerFactory.getLogger(TestCompressionTest.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java index 17abc42f3b63..d5551262a13d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java @@ -17,34 +17,28 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestConfigurationUtil { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestConfigurationUtil.class); - private Configuration conf; private Map keyValues; private String key; - @Before + @BeforeEach public void setUp() throws Exception { this.conf = new Configuration(); this.keyValues = ImmutableMap.of("k1", "v1", "k2", "v2"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConnectionCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConnectionCache.java index 3c605ae572d7..799dd5a6224a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConnectionCache.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConnectionCache.java @@ -17,35 +17,30 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestConnectionCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestConnectionCache.class); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDown() throws IOException { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java index 57714e0094ff..c763be055d59 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java @@ -17,21 +17,21 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; 
import java.io.IOException; -import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Predicate; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -58,43 +58,36 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.wal.WALEdit; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@Category({ MiscTests.class, MediumTests.class }) -@RunWith(Parameterized.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; + +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate public class TestCoprocessorScanPolicy { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCoprocessorScanPolicy.class); - protected final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final byte[] F = Bytes.toBytes("fam"); private static final byte[] Q = Bytes.toBytes("qual"); private static final byte[] R = 
Bytes.toBytes("row"); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ScanObserver.class.getName()); TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Parameters - public static Collection parameters() { - return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED; + public static Stream parameters() { + return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED.stream().map(arr -> Arguments.of(arr)); } public TestCoprocessorScanPolicy(boolean parallelSeekEnable) { @@ -102,7 +95,7 @@ public TestCoprocessorScanPolicy(boolean parallelSeekEnable) { .setBoolean(StoreScanner.STORESCANNER_PARALLEL_SEEK_ENABLE, parallelSeekEnable); } - @Test + @TestTemplate public void testBaseCases() throws Exception { TableName tableName = TableName.valueOf("baseCases"); if (TEST_UTIL.getAdmin().tableExists(tableName)) { @@ -161,7 +154,7 @@ public void testBaseCases() throws Exception { t.close(); } - @Test + @TestTemplate public void testTTL() throws Exception { TableName tableName = TableName.valueOf("testTTL"); if (TEST_UTIL.getAdmin().tableExists(tableName)) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java index 70cfe30a1437..70c96a9a06d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java @@ -17,39 +17,34 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests to make sure that the default environment edge conforms to appropriate behaviour. */ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestDefaultEnvironmentEdge { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDefaultEnvironmentEdge.class); - @Test public void testGetCurrentTimeUsesSystemClock() { DefaultEnvironmentEdge edge = new DefaultEnvironmentEdge(); long systemTime = EnvironmentEdgeManager.currentTime(); long edgeTime = edge.currentTime(); - assertTrue("System time must be either the same or less than the edge time", - systemTime < edgeTime || systemTime == edgeTime); + assertTrue(systemTime < edgeTime || systemTime == edgeTime, + "System time must be either the same or less than the edge time"); try { Thread.sleep(1); } catch (InterruptedException e) { fail(e.getMessage()); } long secondEdgeTime = edge.currentTime(); - assertTrue("Second time must be greater than the first", secondEdgeTime > edgeTime); + assertTrue(secondEdgeTime > edgeTime, "Second time must be greater than the first"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java index f0cc2febd6e8..a712db83eb27 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.fail; +import static 
org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.security.Key; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.Cipher; @@ -33,17 +33,13 @@ import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestEncryptionTest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestEncryptionTest.class); - @Test public void testTestKeyProvider() throws Exception { Configuration conf = HBaseConfiguration.create(); @@ -51,12 +47,13 @@ public void testTestKeyProvider() throws Exception { EncryptionTest.testKeyProvider(conf); } - @Test(expected = IOException.class) + @Test public void testBadKeyProvider() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, FailingKeyProvider.class.getName()); - EncryptionTest.testKeyProvider(conf); - fail("Instantiation of bad test key provider should have failed check"); + assertThrows(IOException.class, () -> { + EncryptionTest.testKeyProvider(conf); + }); } @Test @@ -66,12 +63,13 @@ public void testDefaultCipherProvider() throws Exception { EncryptionTest.testCipherProvider(conf); } - @Test(expected = IOException.class) + @Test public void testBadCipherProvider() throws Exception { Configuration conf = HBaseConfiguration.create(); 
conf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, FailingCipherProvider.class.getName()); - EncryptionTest.testCipherProvider(conf); - fail("Instantiation of bad test cipher provider should have failed check"); + assertThrows(IOException.class, () -> { + EncryptionTest.testCipherProvider(conf); + }); } @Test @@ -86,12 +84,13 @@ public void testAESCipher() { } } - @Test(expected = IOException.class) + @Test public void testUnknownCipher() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName()); - EncryptionTest.testEncryption(conf, "foobar", null); - fail("Test for bogus cipher should have failed"); + assertThrows(IOException.class, () -> { + EncryptionTest.testEncryption(conf, "foobar", null); + }); } @Test @@ -121,13 +120,15 @@ public void testTestEnabledWhenCryptoIsExplicitlyEnabled() { } } - @Test(expected = IOException.class) + @Test public void testTestEnabledWhenCryptoIsExplicitlyDisabled() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName()); String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES); conf.setBoolean(Encryption.CRYPTO_ENABLED_CONF_KEY, false); - EncryptionTest.testEncryption(conf, algorithm, null); + assertThrows(IOException.class, () -> { + EncryptionTest.testEncryption(conf, algorithm, null); + }); } public static class FailingKeyProvider implements KeyProvider { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java index 5e2b4b52950f..85727d146940 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java @@ -17,13 +17,13 @@ */ package 
org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.Arrays; @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableDescriptors; @@ -43,13 +42,11 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,45 +54,40 @@ * Tests for {@link FSTableDescriptors}. 
*/ // Do not support to be executed in he same JVM as other tests -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestFSTableDescriptors { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSTableDescriptors.class); - private static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil(); private static final Logger LOG = LoggerFactory.getLogger(TestFSTableDescriptors.class); - @Rule - public TestName name = new TestName(); - private Path testDir; - @Before - public void setUp() { - testDir = UTIL.getDataTestDir(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) { + testDir = UTIL.getDataTestDir(testInfo.getTestMethod().get().getName()); } - @AfterClass + @AfterAll public static void tearDownAfterClass() { UTIL.cleanupTestDir(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testRegexAgainstOldStyleTableInfo() { Path p = new Path(testDir, FSTableDescriptors.TABLEINFO_FILE_PREFIX); int i = FSTableDescriptors.getTableInfoSequenceIdAndFileLength(p).sequenceId; assertEquals(0, i); // Assert it won't eat garbage -- that it fails - p = new Path(testDir, "abc"); - FSTableDescriptors.getTableInfoSequenceIdAndFileLength(p); + Path p2 = new Path(testDir, "abc"); + org.junit.jupiter.api.Assertions.assertThrows(IllegalArgumentException.class, + () -> FSTableDescriptors.getTableInfoSequenceIdAndFileLength(p2)); } @Test - public void testCreateAndUpdate() throws IOException { - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + public void testCreateAndUpdate(TestInfo testInfo) throws IOException { + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); FSTableDescriptors fstd = new FSTableDescriptors(fs, testDir); 
assertTrue(fstd.createTableDescriptor(htd)); @@ -103,7 +95,7 @@ public void testCreateAndUpdate() throws IOException { Path tableInfoDir = new Path(CommonFSUtils.getTableDir(testDir, htd.getTableName()), FSTableDescriptors.TABLEINFO_DIR); FileStatus[] statuses = fs.listStatus(tableInfoDir); - assertEquals("statuses.length=" + statuses.length, 1, statuses.length); + assertEquals(1, statuses.length, "statuses.length=" + statuses.length); for (int i = 0; i < 10; i++) { fstd.update(htd); } @@ -112,9 +104,9 @@ public void testCreateAndUpdate() throws IOException { } @Test - public void testSequenceIdAdvancesOnTableInfo() throws IOException { - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + public void testSequenceIdAdvancesOnTableInfo(TestInfo testInfo) throws IOException { + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); FSTableDescriptors fstd = new FSTableDescriptors(fs, testDir); Path previousPath = null; @@ -166,22 +158,22 @@ private Path assertWriteAndReadSequenceId(final int i) { } @Test - public void testRemoves() throws IOException { + public void testRemoves(TestInfo testInfo) throws IOException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any detrius laying around. 
TableDescriptors htds = new FSTableDescriptors(fs, testDir); - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); htds.update(htd); assertNotNull(htds.remove(htd.getTableName())); assertNull(htds.remove(htd.getTableName())); } @Test - public void testReadingHTDFromFS() throws IOException { + public void testReadingHTDFromFS(TestInfo testInfo) throws IOException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); FSTableDescriptors fstd = new FSTableDescriptors(fs, testDir); fstd.createTableDescriptor(htd); TableDescriptor td2 = @@ -190,7 +182,7 @@ public void testReadingHTDFromFS() throws IOException { } @Test - public void testTableDescriptors() throws IOException, InterruptedException { + public void testTableDescriptors(TestInfo testInfo) throws IOException, InterruptedException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any debris laying around. FSTableDescriptors htds = new FSTableDescriptors(fs, testDir) { @@ -203,67 +195,68 @@ public TableDescriptor get(TableName tablename) { final int count = 10; // Write out table infos. 
for (int i = 0; i < count; i++) { - htds.createTableDescriptor( - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName() + i)).build()); + htds.createTableDescriptor(TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)).build()); } for (int i = 0; i < count; i++) { - assertTrue(htds.get(TableName.valueOf(name.getMethodName() + i)) != null); + assertTrue(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) != null); } for (int i = 0; i < count; i++) { - assertTrue(htds.get(TableName.valueOf(name.getMethodName() + i)) != null); + assertTrue(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) != null); } // Update the table infos for (int i = 0; i < count; i++) { - TableDescriptorBuilder builder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName() + i)); + TableDescriptorBuilder builder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)); builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); htds.update(builder.build()); } // Wait a while so mod time we write is for sure different. 
Thread.sleep(100); for (int i = 0; i < count; i++) { - assertTrue(htds.get(TableName.valueOf(name.getMethodName() + i)) != null); + assertTrue(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) != null); } for (int i = 0; i < count; i++) { - assertTrue(htds.get(TableName.valueOf(name.getMethodName() + i)) != null); + assertTrue(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) != null); } assertEquals(count * 4, htds.invocations); - assertTrue("expected=" + (count * 2) + ", actual=" + htds.cachehits, - htds.cachehits >= (count * 2)); + assertTrue(htds.cachehits >= (count * 2), + "expected=" + (count * 2) + ", actual=" + htds.cachehits); } @Test - public void testTableDescriptorsNoCache() throws IOException, InterruptedException { + public void testTableDescriptorsNoCache(TestInfo testInfo) + throws IOException, InterruptedException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any debris laying around. FSTableDescriptors htds = new FSTableDescriptorsTest(fs, testDir, false); final int count = 10; // Write out table infos. 
for (int i = 0; i < count; i++) { - htds.createTableDescriptor( - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName() + i)).build()); + htds.createTableDescriptor(TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)).build()); } for (int i = 0; i < 2 * count; i++) { - assertNotNull("Expected HTD, got null instead", - htds.get(TableName.valueOf(name.getMethodName() + i % 2))); + assertNotNull(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i % 2)), + "Expected HTD, got null instead"); } // Update the table infos for (int i = 0; i < count; i++) { - TableDescriptorBuilder builder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName() + i)); + TableDescriptorBuilder builder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)); builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i)); htds.update(builder.build()); } for (int i = 0; i < count; i++) { - assertNotNull("Expected HTD, got null instead", - htds.get(TableName.valueOf(name.getMethodName() + i))); - assertTrue("Column Family " + i + " missing", htds - .get(TableName.valueOf(name.getMethodName() + i)).hasColumnFamily(Bytes.toBytes("" + i))); + assertNotNull(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)), + "Expected HTD, got null instead"); + assertTrue(htds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) + .hasColumnFamily(Bytes.toBytes("" + i)), "Column Family " + i + " missing"); } assertEquals(count * 4, htds.invocations); - assertEquals("expected=0, actual=" + htds.cachehits, 0, htds.cachehits); + assertEquals(0, htds.cachehits, "expected=0, actual=" + htds.cachehits); } @Test @@ -281,8 +274,9 @@ public void testGetAll() throws IOException, InterruptedException { // add hbase:meta htds .createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME).build()); - 
assertEquals("getAll() didn't return all TableDescriptors, expected: " + (count + 1) + " got: " - + htds.getAll().size(), count + 1, htds.getAll().size()); + assertEquals(count + 1, htds.getAll().size(), + "getAll() didn't return all TableDescriptors, expected: " + (count + 1) + " got: " + + htds.getAll().size()); } @Test @@ -302,13 +296,15 @@ public void testParallelGetAll() throws IOException, InterruptedException { .createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME).build()); int getTableDescriptorSize = htds.getAll().size(); - assertEquals("getAll() didn't return all TableDescriptors, expected: " + (count + 1) + " got: " - + getTableDescriptorSize, count + 1, getTableDescriptorSize); + assertEquals(count + 1, getTableDescriptorSize, + "getAll() didn't return all TableDescriptors, expected: " + (count + 1) + " got: " + + getTableDescriptorSize); // get again to check whether the cache works well getTableDescriptorSize = htds.getAll().size(); - assertEquals("getAll() didn't return all TableDescriptors with cache, expected: " + (count + 1) - + " got: " + getTableDescriptorSize, count + 1, getTableDescriptorSize); + assertEquals(count + 1, getTableDescriptorSize, + "getAll() didn't return all TableDescriptors with cache, expected: " + (count + 1) + " got: " + + getTableDescriptorSize); } @Test @@ -341,7 +337,7 @@ public void testGetAllOrdering() throws Exception { } @Test - public void testCacheConsistency() throws IOException, InterruptedException { + public void testCacheConsistency(TestInfo testInfo) throws IOException, InterruptedException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any debris laying around. 
FSTableDescriptors chtds = new FSTableDescriptorsTest(fs, testDir); @@ -350,13 +346,14 @@ public void testCacheConsistency() throws IOException, InterruptedException { final int count = 10; // Write out table infos via non-cached FSTableDescriptors for (int i = 0; i < count; i++) { - nonchtds.createTableDescriptor( - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName() + i)).build()); + nonchtds.createTableDescriptor(TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)).build()); } // Calls to getAll() won't increase the cache counter, do per table. for (int i = 0; i < count; i++) { - assertTrue(chtds.get(TableName.valueOf(name.getMethodName() + i)) != null); + assertTrue( + chtds.get(TableName.valueOf(testInfo.getTestMethod().get().getName() + i)) != null); } assertTrue(nonchtds.getAll().size() == chtds.getAll().size()); @@ -372,9 +369,8 @@ public void testCacheConsistency() throws IOException, InterruptedException { for (Map.Entry entry : chtds.getAll().entrySet()) { String t = (String) entry.getKey(); TableDescriptor nchtd = entry.getValue(); - assertTrue( - "expected " + htd.toString() + " got: " + chtds.get(TableName.valueOf(t)).toString(), - (nchtd.equals(chtds.get(TableName.valueOf(t))))); + assertTrue((nchtd.equals(chtds.get(TableName.valueOf(t)))), + "expected " + htd.toString() + " got: " + chtds.get(TableName.valueOf(t)).toString()); } // this is by design, for FSTableDescriptor with cache enabled, once we have done a full scan // and load all the table descriptors to cache, we will not go to file system again, as the only @@ -388,17 +384,17 @@ public void testNoSuchTable() throws IOException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any detrius laying around. 
TableDescriptors htds = new FSTableDescriptors(fs, testDir); - assertNull("There shouldn't be any HTD for this table", - htds.get(TableName.valueOf("NoSuchTable"))); + assertNull(htds.get(TableName.valueOf("NoSuchTable")), + "There shouldn't be any HTD for this table"); } @Test - public void testUpdates() throws IOException { + public void testUpdates(TestInfo testInfo) throws IOException { FileSystem fs = FileSystem.get(UTIL.getConfiguration()); // Cleanup old tests if any detrius laying around. TableDescriptors htds = new FSTableDescriptors(fs, testDir); - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); htds.update(htd); htds.update(htd); htds.update(htd); @@ -441,9 +437,10 @@ public void testReadingInvalidDirectoryFromFS() throws IOException { } @Test - public void testCreateTableDescriptorUpdatesIfExistsAlready() throws IOException { - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + public void testCreateTableDescriptorUpdatesIfExistsAlready(TestInfo testInfo) + throws IOException { + TableDescriptor htd = TableDescriptorBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); FSTableDescriptors fstd = new FSTableDescriptors(fs, testDir); assertTrue(fstd.createTableDescriptor(htd)); @@ -456,11 +453,12 @@ public void testCreateTableDescriptorUpdatesIfExistsAlready() throws IOException } @Test - public void testIgnoreBrokenTableDescriptorFiles() throws IOException { - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build(); + public void testIgnoreBrokenTableDescriptorFiles(TestInfo testInfo) throws IOException { + 
TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build(); TableDescriptor newHtd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf2")).build(); assertNotEquals(newHtd, htd); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java index c5555ec89584..80f8ec57d31b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -39,7 +40,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.StreamCapabilities; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -55,31 +55,26 @@ import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test {@link FSUtils}. */ -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestFSUtils { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSUtils.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFSUtils.class); private HBaseTestingUtil htu; private FileSystem fs; private Configuration conf; - @Before + @BeforeEach public void setUp() throws IOException { htu = new HBaseTestingUtil(); fs = htu.getTestFileSystem(); @@ -235,8 +230,8 @@ private void testComputeHDFSBlocksDistribution( blocksDistribution.getTopHosts().size() != 3 && EnvironmentEdgeManager.currentTime() < maxTime ); - assertEquals("Wrong number of hosts distributing blocks.", 3, - blocksDistribution.getTopHosts().size()); + assertEquals(3, blocksDistribution.getTopHosts().size(), + "Wrong number of hosts distributing blocks."); } finally { htu.shutdownMiniDFSCluster(); } @@ -275,7 +270,7 @@ public void testVersion() throws DeserializationException, IOException { } catch (FileSystemVersionException e) { thrown = true; } - assertTrue("Expected FileSystemVersionException", thrown); + assertTrue(thrown, "Expected FileSystemVersionException"); // Write out a good version file. See if we can read it in and convert. 
String version = HConstants.FILE_SYSTEM_VERSION; writeVersionFile(versionFile, version); @@ -299,7 +294,7 @@ public void testVersion() throws DeserializationException, IOException { } catch (FileSystemVersionException e) { thrown = true; } - assertTrue("Expected FileSystemVersionException", thrown); + assertTrue(thrown, "Expected FileSystemVersionException"); } @Test @@ -354,7 +349,7 @@ public void testDeleteAndExists() throws Exception { try { FSDataOutputStream out = FSUtils.create(conf, fs, p, perms, null); out.close(); - assertTrue("The created file should be present", CommonFSUtils.isExists(fs, p)); + assertTrue(CommonFSUtils.isExists(fs, p), "The created file should be present"); // delete the file with recursion as false. Only the file will be deleted. CommonFSUtils.delete(fs, p, false); // Create another file @@ -362,7 +357,7 @@ public void testDeleteAndExists() throws Exception { out1.close(); // delete the file with recursion as false. Still the file only will be deleted CommonFSUtils.delete(fs, p1, true); - assertFalse("The created file should be present", CommonFSUtils.isExists(fs, p1)); + assertFalse(CommonFSUtils.isExists(fs, p1), "The created file should be present"); // and then cleanup } finally { CommonFSUtils.delete(fs, p, true); @@ -395,7 +390,7 @@ public void testRenameAndSetModifyTime() throws Exception { FSDataOutputStream out = fs.create(p); out.close(); - assertTrue("The created file should be present", CommonFSUtils.isExists(fs, p)); + assertTrue(CommonFSUtils.isExists(fs, p), "The created file should be present"); long expect = EnvironmentEdgeManager.currentTime() + 1000; assertNotEquals(expect, fs.getFileStatus(p).getModificationTime()); @@ -408,8 +403,8 @@ public void testRenameAndSetModifyTime() throws Exception { Path dst = new Path(testDir, dstFile); assertTrue(CommonFSUtils.renameAndSetModifyTime(fs, p, dst)); - assertFalse("The moved file should not be present", CommonFSUtils.isExists(fs, p)); - assertTrue("The dst file should be 
present", CommonFSUtils.isExists(fs, dst)); + assertFalse(CommonFSUtils.isExists(fs, p), "The moved file should not be present"); + assertTrue(CommonFSUtils.isExists(fs, dst), "The dst file should be present"); assertEquals(expect, fs.getFileStatus(dst).getModificationTime()); cluster.shutdown(); @@ -437,13 +432,14 @@ private void verifyNoHDFSApiInvocationForDefaultPolicy() throws URISyntaxExcepti CommonFSUtils.setStoragePolicy(testFs, new Path("non-exist"), HConstants.DEFAULT_WAL_STORAGE_POLICY, true); } catch (IOException e) { - Assert.fail("Should have bypassed the FS API when setting default storage policy"); + org.junit.jupiter.api.Assertions + .fail("Should have bypassed the FS API when setting default storage policy"); } // There should be exception thrown when given non-default storage policy, which indicates the // HDFS API has been called try { CommonFSUtils.setStoragePolicy(testFs, new Path("non-exist"), "HOT", true); - Assert.fail("Should have invoked the FS API but haven't"); + fail("Should have invoked the FS API but haven't"); } catch (IOException e) { // expected given an invalid path } @@ -499,9 +495,9 @@ private void verifyFileInDirWithStoragePolicy(final String policy) throws Except String hdfsDefaultPolicy = hfs.getStoragePolicyName(hfs.getHomeDirectory()); LOG.debug("The default hdfs storage policy (indicated by home path: " + hfs.getHomeDirectory() + ") is " + hdfsDefaultPolicy); - Assert.assertEquals(hdfsDefaultPolicy, policySet); + assertEquals(hdfsDefaultPolicy, policySet); } else { - Assert.assertEquals(policy, policySet); + assertEquals(policy, policySet); } // will assert existence before deleting. 
cleanupFile(fs, testDir); @@ -636,15 +632,15 @@ private void pReadFile(FileSystem fileSys, Path name) throws IOException { // should throw an exception res = e; } - assertTrue("Error reading beyond file boundary.", res != null); + assertTrue(res != null, "Error reading beyond file boundary."); stm.close(); } private void checkAndEraseData(byte[] actual, int from, byte[] expected, String message) { for (int idx = 0; idx < actual.length; idx++) { - assertEquals(message + " byte " + (from + idx) + " differs. expected " + expected[from + idx] - + " actual " + actual[idx], actual[idx], expected[from + idx]); + assertEquals(expected[from + idx], actual[idx], message + " byte " + (from + idx) + + " differs. expected " + expected[from + idx] + " actual " + actual[idx]); actual[idx] = 0; } } @@ -663,7 +659,7 @@ private void doPread(FSDataInputStream stm, long position, byte[] buffer, int of while (nread < length) { int nbytes = stm.read(position + nread, buffer, offset + nread, length - nread); - assertTrue("Error in pread", nbytes > 0); + assertTrue(nbytes > 0, "Error in pread"); nread += nbytes; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java index 634db242a826..10d6b4e3819d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java @@ -17,35 +17,30 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.MiscTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test {@link FSUtils}. */ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestFSVisitor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSVisitor.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFSVisitor.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -60,7 +55,7 @@ public class TestFSVisitor { private Path tableDir; private Path rootDir; - @Before + @BeforeEach public void setUp() throws Exception { fs = FileSystem.get(TEST_UTIL.getConfiguration()); rootDir = TEST_UTIL.getDataTestDir("hbase"); @@ -72,7 +67,7 @@ public void setUp() throws Exception { CommonFSUtils.logFileSystemState(fs, rootDir, LOG); } - @After + @AfterEach public void tearDown() throws Exception { fs.delete(rootDir, true); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseConfTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseConfTool.java index 693d8194e98c..0b40a5a3c5ac 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseConfTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseConfTool.java @@ -17,27 +17,22 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.PrintStream; 
import java.util.LinkedList; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestHBaseConfTool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseConfTool.class); - @Test public void testHBaseConfTool() { String[] args = { TimeToLiveLogCleaner.TTL_CONF_KEY }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckCleanReplicationBarriers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckCleanReplicationBarriers.java index c8e96383492a..f959a1b4465b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckCleanReplicationBarriers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckCleanReplicationBarriers.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.ClientMetaTableAccessor; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -52,19 
+51,16 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; -@Category({ ReplicationTests.class, MediumTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(MediumTests.TAG) public class TestHBaseFsckCleanReplicationBarriers { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckCleanReplicationBarriers.class); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); @@ -78,7 +74,7 @@ public class TestHBaseFsckCleanReplicationBarriers { private static String COLUMN_FAMILY = "info"; - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.startMiniCluster(1); QUEUE_STORAGE = ReplicationStorageFactory.getReplicationQueueStorage(UTIL.getConnection(), @@ -86,7 +82,7 @@ public static void setUp() throws Exception { createPeer(); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } @@ -111,10 +107,10 @@ public void testCleanReplicationBarrierWithDeletedTable() throws Exception { regionInfos.add(regionInfo); addStateAndBarrier(regionInfo, RegionState.State.OPEN, 10, 100); updatePushedSeqId(regionInfo, 10); - assertEquals("check if there is lastPushedId", 10, - QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_1)); - assertEquals("check if there is lastPushedId", 10, - QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_2)); + assertEquals(10, 
QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_1), + "check if there is lastPushedId"); + assertEquals(10, QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_2), + "check if there is lastPushedId"); } Scan barrierScan = new Scan(); barrierScan.setCaching(100); @@ -134,10 +130,10 @@ public void testCleanReplicationBarrierWithDeletedTable() throws Exception { boolean cleaned = HbckTestingUtil.cleanReplicationBarrier(UTIL.getConfiguration(), tableName); assertTrue(cleaned); for (RegionInfo regionInfo : regionInfos) { - assertEquals("check if there is lastPushedId", -1, - QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_1)); - assertEquals("check if there is lastPushedId", -1, - QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_2)); + assertEquals(-1, QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_1), + "check if there is lastPushedId"); + assertEquals(-1, QUEUE_STORAGE.getLastSequenceId(regionInfo.getEncodedName(), PEER_2), + "check if there is lastPushedId"); } cleaned = HbckTestingUtil.cleanReplicationBarrier(UTIL.getConfiguration(), tableName); assertFalse(cleaned); @@ -159,18 +155,18 @@ public void testCleanReplicationBarrierWithExistTable() throws Exception { for (RegionInfo region : UTIL.getAdmin().getRegions(tableName)) { addStateAndBarrier(region, RegionState.State.OFFLINE, 10, 100); updatePushedSeqId(region, 10); - assertEquals("check if there is lastPushedId", 10, - QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_1)); - assertEquals("check if there is lastPushedId", 10, - QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_2)); + assertEquals(10, QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_1), + "check if there is lastPushedId"); + assertEquals(10, QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_2), + "check if there is lastPushedId"); } boolean cleaned = HbckTestingUtil.cleanReplicationBarrier(UTIL.getConfiguration(), 
tableName); assertTrue(cleaned); for (RegionInfo region : UTIL.getAdmin().getRegions(tableName)) { - assertEquals("check if there is lastPushedId", -1, - QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_1)); - assertEquals("check if there is lastPushedId", -1, - QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_2)); + assertEquals(-1, QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_1), + "check if there is lastPushedId"); + assertEquals(-1, QUEUE_STORAGE.getLastSequenceId(region.getEncodedName(), PEER_2), + "check if there is lastPushedId"); } cleaned = HbckTestingUtil.cleanReplicationBarrier(UTIL.getConfiguration(), tableName); assertFalse(cleaned); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java index 83ce9d788334..f97b548eb8b4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java @@ -17,29 +17,24 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.HbckRegionInfo.MetaEntry; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test the comparator used by Hbck. 
*/ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestHBaseFsckComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckComparator.class); - TableName table = TableName.valueOf("table1"); TableName table2 = TableName.valueOf("table2"); byte[] keyStart = Bytes.toBytes(""); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java index 560ebc70c065..5e034ca058b2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.security.Key; import java.util.ArrayList; @@ -27,7 +27,6 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -52,29 +51,25 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; //revisit later -@Ignore -@Category({ MiscTests.class, MediumTests.class }) +@Disabled +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestHBaseFsckEncryption { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckEncryption.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private Configuration conf; private TableDescriptor tableDescriptor; private Key cfKey; - @Before + @BeforeEach public void setUp() throws Exception { conf = TEST_UTIL.getConfiguration(); conf.setInt("hfile.format.version", 3); @@ -105,7 +100,7 @@ public void setUp() throws Exception { TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -133,8 +128,8 @@ public void testFsckWithEncryption() throws Exception { final List paths = findStorefilePaths(tableDescriptor.getTableName()); assertTrue(paths.size() > 0); for (Path path : paths) { - assertTrue("Store file " + path + " has incorrect key", - Bytes.equals(cfKey.getEncoded(), extractHFileKey(path))); + assertTrue(Bytes.equals(cfKey.getEncoded(), extractHFileKey(path)), + "Store file " + path + " has incorrect key"); } // Insure HBck doesn't consider them corrupt @@ -165,9 +160,9 @@ private byte[] extractHFileKey(Path path) throws Exception { HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), true, conf); try { Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext(); - assertNotNull("Reader has a null crypto context", cryptoContext); + assertNotNull(cryptoContext, "Reader has a null crypto context"); Key key = cryptoContext.getKey(); - assertNotNull("Crypto context has no key", key); + assertNotNull(key, "Crypto context has no key"); 
return key.getEncoded(); } finally { reader.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java index b24b721762d3..d0ee0dae5513 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.SynchronousQueue; @@ -25,7 +25,6 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -36,26 +35,23 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; // revisit later -@Ignore -@Category({ MiscTests.class, MediumTests.class }) +@Disabled +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestHBaseFsckMOB extends 
BaseTestHBaseFsck { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckMOB.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.getConfiguration().set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, MasterSyncCoprocessor.class.getName()); @@ -87,7 +83,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.waitUntilAllRegionsAssigned(TableName.META_TABLE_NAME); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { tableExecutorService.shutdown(); hbfsckExecutorService.shutdown(); @@ -95,7 +91,7 @@ public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setUp() { EnvironmentEdgeManager.reset(); } @@ -105,8 +101,8 @@ public void setUp() { */ @SuppressWarnings("deprecation") @Test - public void testQuarantineCorruptMobFile() throws Exception { - TableName table = TableName.valueOf(name.getMethodName()); + public void testQuarantineCorruptMobFile(TestInfo testInfo) throws Exception { + TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); try { setupMobTable(table); assertEquals(ROWKEYS.length, countRows()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java index 6580a65fbe2d..5a691dd8d58c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplication.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Collections; import java.util.List; import java.util.stream.Stream; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -37,37 +36,31 @@ import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.util.HbckErrorReporter.ERROR_CODE; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ ReplicationTests.class, MediumTests.class }) +@Tag(ReplicationTests.TAG) +@Tag(MediumTests.TAG) public class TestHBaseFsckReplication { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckReplication.class); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @Rule - public final TestName name = new TestName(); - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { UTIL.getConfiguration().setBoolean("hbase.write.hbck1.lock.file", false); UTIL.startMiniCluster(1); - TableName tableName = TableName.valueOf("replication_" + name.getMethodName()); + TableName tableName = + TableName.valueOf("replication_" + testInfo.getTestMethod().get().getName()); UTIL.getAdmin() .createTable(ReplicationStorageFactory.createReplicationQueueTableDescriptor(tableName)); UTIL.getConfiguration().set(ReplicationStorageFactory.REPLICATION_QUEUE_TABLE_NAME, tableName.getNameAsString()); } - @After + @AfterEach public void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckWithoutTableHbaseReplication.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckWithoutTableHbaseReplication.java index 279962c934fd..1184ca1e859f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckWithoutTableHbaseReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckWithoutTableHbaseReplication.java @@ -17,52 +17,46 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.replication.ReplicationStorageFactory; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestHBaseFsckWithoutTableHbaseReplication { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHBaseFsckWithoutTableHbaseReplication.class); - - @ClassRule - public static final TestName name = new TestName(); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - private static final TableName tableName = - TableName.valueOf("replication_" + name.getMethodName()); - @Before - public void setUp() throws 
Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { UTIL.getConfiguration().setBoolean("hbase.write.hbck1.lock.file", false); + TableName tableName = + TableName.valueOf("replication_" + testInfo.getTestMethod().get().getName()); UTIL.getConfiguration().set(ReplicationStorageFactory.REPLICATION_QUEUE_TABLE_NAME, tableName.getNameAsString()); UTIL.startMiniCluster(1); } - @After + @AfterEach public void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } @Test - public void test() throws Exception { + public void test(TestInfo testInfo) throws Exception { + TableName tableName = + TableName.valueOf("replication_" + testInfo.getTestMethod().get().getName()); assertFalse(UTIL.getAdmin().tableExists(tableName)); HBaseFsck hBaseFsck = HbckTestingUtil.doFsck(UTIL.getConfiguration(), true); assertEquals(0, hBaseFsck.getRetCode()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java index 9ed78282146d..8bc23c6758c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java @@ -17,42 +17,33 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test that the utility works as expected */ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileArchiveUtil { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileArchiveUtil.class); - private Path rootDir = new Path("./"); - @Rule - public TestName name = new TestName(); - @Test - public void testGetTableArchivePath() { - assertNotNull( - HFileArchiveUtil.getTableArchivePath(rootDir, TableName.valueOf(name.getMethodName()))); + public void testGetTableArchivePath(TestInfo testInfo) { + assertNotNull(HFileArchiveUtil.getTableArchivePath(rootDir, + TableName.valueOf(testInfo.getTestMethod().get().getName()))); } @Test @@ -63,18 +54,19 @@ public void testGetArchivePath() throws Exception { } @Test - public void testRegionArchiveDir() { + public void testRegionArchiveDir(TestInfo testInfo) { Path regionDir = new Path("region"); assertNotNull(HFileArchiveUtil.getRegionArchiveDir(rootDir, - TableName.valueOf(name.getMethodName()), regionDir)); + TableName.valueOf(testInfo.getTestMethod().get().getName()), regionDir)); } @Test - public void testGetStoreArchivePath() throws IOException { + public void testGetStoreArchivePath(TestInfo testInfo) throws IOException { byte[] family = Bytes.toBytes("Family"); - Path tabledir = CommonFSUtils.getTableDir(rootDir, TableName.valueOf(name.getMethodName())); - RegionInfo region = - RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + Path tabledir = CommonFSUtils.getTableDir(rootDir, + TableName.valueOf(testInfo.getTestMethod().get().getName())); + RegionInfo region = RegionInfoBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); Configuration conf = new 
Configuration(); CommonFSUtils.setRootDir(conf, new Path("root")); assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java index 198ceee3de46..d5ee227903e0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Map; import java.util.Random; @@ -29,22 +29,18 @@ import java.util.concurrent.Future; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) // Medium as it creates 100 threads; seems better to run it isolated public class TestIdLock { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestIdLock.class); - private static final Logger LOG = LoggerFactory.getLogger(TestIdLock.class); private static final int NUM_IDS = 16; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockStrongRef.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockStrongRef.java index 29f93bc3a4ad..590991c21112 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockStrongRef.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockStrongRef.java @@ -17,23 +17,19 @@ */ package org.apache.hadoop.hbase.util; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestIdReadWriteLockStrongRef { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestIdReadWriteLockStrongRef.class); - private static final Logger LOG = LoggerFactory.getLogger(TestIdReadWriteLockStrongRef.class); private IdReadWriteLockStrongRef idLock = new IdReadWriteLockStrongRef<>(); @@ -44,9 +40,9 @@ public void testGetLock() throws Exception { Long offset_2 = 2L; ReentrantReadWriteLock offsetLock_1 = idLock.getLock(offset_1); ReentrantReadWriteLock offsetLock_2 = idLock.getLock(offset_1); - Assert.assertEquals(offsetLock_1, offsetLock_2); + assertEquals(offsetLock_1, offsetLock_2); ReentrantReadWriteLock offsetLock_3 = idLock.getLock(offset_2); - Assert.assertNotEquals(offsetLock_1, offsetLock_3); + assertNotEquals(offsetLock_1, offsetLock_3); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockWithObjectPool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockWithObjectPool.java index 40ebb3380115..33c665c507e7 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockWithObjectPool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLockWithObjectPool.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.util.Arrays; import java.util.Map; import java.util.Random; import java.util.concurrent.Callable; @@ -33,27 +32,23 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.IdReadWriteLockWithObjectPool.ReferenceType; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@RunWith(Parameterized.class) -@Category({ MiscTests.class, MediumTests.class }) +@HBaseParameterizedTestTemplate +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) // Medium as it creates 100 threads; seems better to run it isolated public class TestIdReadWriteLockWithObjectPool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestIdReadWriteLockWithObjectPool.class); - private static final Logger LOG = LoggerFactory.getLogger(TestIdReadWriteLockWithObjectPool.class); @@ -61,14 +56,15 @@ 
public class TestIdReadWriteLockWithObjectPool { private static final int NUM_THREADS = 128; private static final int NUM_SECONDS = 15; - @Parameterized.Parameter - public IdReadWriteLockWithObjectPool idLock; + private IdReadWriteLockWithObjectPool idLock; + + public TestIdReadWriteLockWithObjectPool(IdReadWriteLockWithObjectPool idLock) { + this.idLock = idLock; + } - @Parameterized.Parameters - public static Iterable data() { - return Arrays - .asList(new Object[][] { { new IdReadWriteLockWithObjectPool(ReferenceType.WEAK) }, - { new IdReadWriteLockWithObjectPool(ReferenceType.SOFT) } }); + public static Stream parameters() { + return Stream.of(Arguments.of(new IdReadWriteLockWithObjectPool(ReferenceType.WEAK)), + Arguments.of(new IdReadWriteLockWithObjectPool(ReferenceType.SOFT))); } private Map idOwner = new ConcurrentHashMap<>(); @@ -118,7 +114,7 @@ public Boolean call() throws Exception { } - @Test + @TestTemplate public void testMultipleClients() throws Exception { ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java index 518c8aec343e..199fa4ef1c40 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java @@ -17,25 +17,20 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; /** * Tests that the incrementing environment edge increments time instead of using the default. */ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestIncrementingEnvironmentEdge { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestIncrementingEnvironmentEdge.class); - @Test public void testGetCurrentTimeUsesSystemClock() { IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java index 6604f29b54a7..65bbe183df86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; @@ -29,22 +29,17 @@ import javax.management.MalformedObjectNameException; import javax.management.ObjectName; import javax.management.openmbean.CompositeData; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MiscTests.class, 
SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestJSONMetricUtil { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestJSONMetricUtil.class); - private static final Logger LOG = LoggerFactory.getLogger(TestJSONMetricUtil.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestLossyCounting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestLossyCounting.java index 4465ec674fa1..fe139cac31d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestLossyCounting.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestLossyCounting.java @@ -17,25 +17,20 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestLossyCounting { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLossyCounting.class); - private final Configuration conf = HBaseConfiguration.create(); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcDispatcher.java index a3ffbd858d83..80e9eb3b8274 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcDispatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcDispatcher.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.util; import static org.apache.hadoop.hbase.master.HMaster.HBASE_MASTER_RSPROC_DISPATCHER_CLASS; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; @@ -41,15 +41,12 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,22 +55,16 @@ /** * Testing custom RSProcedureDispatcher to ensure retry limit can be imposed on certain errors. 
*/ -@Category({ MiscTests.class, LargeTests.class }) +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestProcDispatcher { private static final Logger LOG = LoggerFactory.getLogger(TestProcDispatcher.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcDispatcher.class); - - @Rule - public TestName name = new TestName(); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static ServerName rs0; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.getConfiguration().set(HBASE_MASTER_RSPROC_DISPATCHER_CLASS, RSProcDispatcher.class.getName()); @@ -84,14 +75,14 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.getAdmin().balancerSwitch(false, true); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam1")).build(); int startKey = 0; @@ -100,9 +91,9 @@ public void setUp() throws Exception { } @Test - public void testRetryLimitOnConnClosedErrors() throws Exception { + public void testRetryLimitOnConnClosedErrors(TestInfo testInfo) throws Exception { HbckChore hbckChore = new HbckChore(TEST_UTIL.getHBaseCluster().getMaster()); - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = 
TEST_UTIL.getConnection().getTable(tableName); @@ -122,9 +113,9 @@ public void testRetryLimitOnConnClosedErrors() throws Exception { hbckChore.choreForTesting(); HbckReport hbckReport = hbckChore.getLastReport(); - Assert.assertEquals(0, hbckReport.getInconsistentRegions().size()); - Assert.assertEquals(0, hbckReport.getOrphanRegionsOnFS().size()); - Assert.assertEquals(0, hbckReport.getOrphanRegionsOnRS().size()); + assertEquals(0, hbckReport.getInconsistentRegions().size()); + assertEquals(0, hbckReport.getOrphanRegionsOnFS().size()); + assertEquals(0, hbckReport.getOrphanRegionsOnRS().size()); HRegion region0 = hRegionServer0.getRegions().get(0); // move all regions from server1 to server0 diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover1.java index 503f0dbdd971..0613a84ff096 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover1.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.File; import java.io.FileWriter; @@ -27,7 +27,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; @@ -42,16 +41,14 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.RegionMover.RegionMoverBuilder; -import org.junit.After; -import org.junit.AfterClass; -import 
org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,34 +56,31 @@ * Tests for Region Mover Load/Unload functionality with and without ack mode and also to test * exclude functionality useful for rack decommissioning */ -@Category({ MiscTests.class, LargeTests.class }) +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestRegionMover1 { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionMover1.class); - - @Rule - public TestName name = new TestName(); + private String testMethodName; private static final Logger LOG = LoggerFactory.getLogger(TestRegionMover1.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); TEST_UTIL.getAdmin().balancerSwitch(false, true); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + testMethodName = testInfo.getTestMethod().get().getName(); + final TableName tableName = TableName.valueOf(testMethodName); TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName) 
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam1")).build(); String startKey = "a"; @@ -94,9 +88,9 @@ public void setUp() throws Exception { TEST_UTIL.getAdmin().createTable(tableDesc, Bytes.toBytes(startKey), Bytes.toBytes(endKey), 9); } - @After + @AfterEach public void tearDown() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); TEST_UTIL.getAdmin().disableTable(tableName); TEST_UTIL.getAdmin().deleteTable(tableName); } @@ -351,7 +345,7 @@ public void testDecomServerExclusionWithAck() throws Exception { int sourceServerRegions = regionServer.getRegions().size(); try (RegionMover regionMover = rmBuilder.build()) { - Assert.assertTrue(regionMover.unload()); + Assertions.assertTrue(regionMover.unload()); LOG.info("Unloading {}", hostname); assertEquals(0, regionServer.getNumberOfOnlineRegions()); assertEquals(regionsExcludeServer, cluster.getRegionServer(1).getNumberOfOnlineRegions()); @@ -360,11 +354,11 @@ public void testDecomServerExclusionWithAck() throws Exception { List regionList = cluster.getRegionServer(1).getRegions(); int index = 0; for (HRegion hRegion : regionList) { - Assert.assertEquals(hRegion, regions.get(index++)); + Assertions.assertEquals(hRegion, regions.get(index++)); } - Assert.assertEquals(targetServerRegions + sourceServerRegions, + Assertions.assertEquals(targetServerRegions + sourceServerRegions, cluster.getRegionServer(2).getNumberOfOnlineRegions()); - Assert.assertTrue(regionMover.load()); + Assertions.assertTrue(regionMover.load()); } TEST_UTIL.getAdmin().recommissionRegionServer(excludeServer.getServerName(), @@ -406,7 +400,7 @@ public void testDecomServerExclusion() throws Exception { int sourceServerRegions = sourceRegionServer.getRegions().size(); try (RegionMover regionMover = rmBuilder.build()) { - Assert.assertTrue(regionMover.unload()); + Assertions.assertTrue(regionMover.unload()); LOG.info("Unloading {}", hostname); 
assertEquals(0, sourceRegionServer.getNumberOfOnlineRegions()); assertEquals(regionsExcludeServer, cluster.getRegionServer(0).getNumberOfOnlineRegions()); @@ -415,11 +409,11 @@ public void testDecomServerExclusion() throws Exception { List regionList = cluster.getRegionServer(0).getRegions(); int index = 0; for (HRegion hRegion : regionList) { - Assert.assertEquals(hRegion, regions.get(index++)); + Assertions.assertEquals(hRegion, regions.get(index++)); } - Assert.assertEquals(targetServerRegions + sourceServerRegions, + Assertions.assertEquals(targetServerRegions + sourceServerRegions, cluster.getRegionServer(2).getNumberOfOnlineRegions()); - Assert.assertTrue(regionMover.load()); + Assertions.assertTrue(regionMover.load()); } TEST_UTIL.getAdmin().recommissionRegionServer(excludeServer.getServerName(), @@ -452,7 +446,7 @@ public void testExcludeAndDecomServers() throws Exception { new RegionMoverBuilder(sourceServer, TEST_UTIL.getConfiguration()).ack(true) .excludeFile(excludeFile.getCanonicalPath()); try (RegionMover regionMover = rmBuilder.build()) { - Assert.assertFalse(regionMover.unload()); + Assertions.assertFalse(regionMover.unload()); } TEST_UTIL.getAdmin().recommissionRegionServer(decomServer.getServerName(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover2.java index 15cc71fe4464..919bf76c6619 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover2.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; @@ -43,16 +42,14 @@ import 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,39 +57,37 @@ * Tests for Region Mover Load/Unload functionality with and without ack mode and also to test * exclude functionality useful for rack decommissioning */ -@Category({ MiscTests.class, LargeTests.class }) +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestRegionMover2 { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionMover2.class); private static final String CF = "fam1"; - @Rule - public TestName name = new TestName(); + private String testMethodName; private static final Logger LOG = LoggerFactory.getLogger(TestRegionMover2.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); TEST_UTIL.getAdmin().balancerSwitch(false, true); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - createTable(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + testMethodName = 
testInfo.getTestMethod().get().getName(); + createTable(testMethodName); } - @After + @AfterEach public void tearDown() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); TEST_UTIL.getAdmin().disableTable(tableName); TEST_UTIL.getAdmin().deleteTable(tableName); } @@ -109,7 +104,7 @@ private TableName createTable(String name) throws IOException { @Test public void testWithMergedRegions() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -128,18 +123,18 @@ public void testWithMergedRegions() throws Exception { try (RegionMover rm = rmBuilder.build()) { LOG.debug("Unloading {}", regionServer.getServerName()); rm.unload(); - Assert.assertEquals(0, regionServer.getNumberOfOnlineRegions()); + Assertions.assertEquals(0, regionServer.getNumberOfOnlineRegions()); LOG.debug("Successfully Unloaded, now Loading"); admin.mergeRegionsAsync(new byte[][] { hRegions.get(0).getRegionInfo().getRegionName(), hRegions.get(1).getRegionInfo().getRegionName() }, true).get(5, TimeUnit.SECONDS); - Assert.assertTrue(rm.load()); - Assert.assertEquals(numRegions - 2, regionServer.getNumberOfOnlineRegions()); + Assertions.assertTrue(rm.load()); + Assertions.assertEquals(numRegions - 2, regionServer.getNumberOfOnlineRegions()); } } @Test public void testWithSplitRegions() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -160,7 +155,7 @@ public void 
testWithSplitRegions() throws Exception { try (RegionMover rm = rmBuilder.build()) { LOG.debug("Unloading {}", regionServer.getServerName()); rm.unload(); - Assert.assertEquals(0, regionServer.getNumberOfOnlineRegions()); + Assertions.assertEquals(0, regionServer.getNumberOfOnlineRegions()); LOG.debug("Successfully Unloaded, now Loading"); HRegion hRegion = hRegions.get(1); if (hRegion.getRegionInfo().getStartKey().length == 0) { @@ -177,14 +172,14 @@ public void testWithSplitRegions() throws Exception { int midKey = startKey + (endKey - startKey) / 2; admin.splitRegionAsync(hRegion.getRegionInfo().getRegionName(), Bytes.toBytes(midKey)).get(5, TimeUnit.SECONDS); - Assert.assertTrue(rm.load()); - Assert.assertEquals(numRegions - 1, regionServer.getNumberOfOnlineRegions()); + Assertions.assertTrue(rm.load()); + Assertions.assertEquals(numRegions - 1, regionServer.getNumberOfOnlineRegions()); } } @Test public void testFailedRegionMove() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -202,17 +197,17 @@ public void testFailedRegionMove() throws Exception { try (RegionMover rm = rmBuilder.build()) { LOG.debug("Unloading {}", regionServer.getServerName()); rm.unload(); - Assert.assertEquals(0, regionServer.getNumberOfOnlineRegions()); + Assertions.assertEquals(0, regionServer.getNumberOfOnlineRegions()); LOG.debug("Successfully Unloaded, now Loading"); admin.offline(hRegions.get(0).getRegionInfo().getRegionName()); // loading regions will fail because of offline region - Assert.assertFalse(rm.load()); + Assertions.assertFalse(rm.load()); } } @Test public void testDeletedTable() throws Exception { - TableName tableNameToDelete = createTable(name.getMethodName() + "ToDelete"); + TableName 
tableNameToDelete = createTable(testMethodName + "ToDelete"); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HRegionServer regionServer = cluster.getRegionServer(0); String rsName = regionServer.getServerName().getAddress().toString(); @@ -222,11 +217,11 @@ public void testDeletedTable() throws Exception { try (Admin admin = TEST_UTIL.getAdmin(); RegionMover rm = rmBuilder.build()) { LOG.debug("Unloading {}", regionServer.getServerName()); rm.unload(); - Assert.assertEquals(0, regionServer.getNumberOfOnlineRegions()); + Assertions.assertEquals(0, regionServer.getNumberOfOnlineRegions()); LOG.debug("Successfully Unloaded, now delete table"); admin.disableTable(tableNameToDelete); admin.deleteTable(tableNameToDelete); - Assert.assertTrue(rm.load()); + Assertions.assertTrue(rm.load()); } } @@ -240,7 +235,7 @@ public void loadDummyDataInTable(TableName tableName) throws Exception { @Test public void testIsolateSingleRegionOnTheSameServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); ServerName sourceServerName = findSourceServerName(tableName); // Isolating 1 region on the same region server. 
@@ -249,7 +244,7 @@ public void testIsolateSingleRegionOnTheSameServer() throws Exception { @Test public void testIsolateSingleRegionOnTheDifferentServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); ServerName sourceServerName = findSourceServerName(tableName); ServerName destinationServerName = findDestinationServerName(sourceServerName); @@ -259,7 +254,7 @@ public void testIsolateSingleRegionOnTheDifferentServer() throws Exception { @Test public void testIsolateMultipleRegionsOnTheSameServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); ServerName sourceServerName = findSourceServerName(tableName); // Isolating 2 regions on the same region server. @@ -268,7 +263,7 @@ public void testIsolateMultipleRegionsOnTheSameServer() throws Exception { @Test public void testIsolateMultipleRegionsOnTheDifferentServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); // Isolating 2 regions on the different region server. 
ServerName sourceServerName = findSourceServerName(tableName); @@ -291,7 +286,7 @@ public void testIsolateMetaOnTheDifferentServer() throws Exception { @Test public void testIsolateMetaAndRandomRegionOnTheMetaServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); ServerName metaServerSource = findMetaRSLocation(); ServerName randomSeverRegion = findSourceServerName(tableName); @@ -300,7 +295,7 @@ public void testIsolateMetaAndRandomRegionOnTheMetaServer() throws Exception { @Test public void testIsolateMetaAndRandomRegionOnTheRandomServer() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); loadDummyDataInTable(tableName); ServerName randomSeverRegion = findSourceServerName(tableName); regionIsolationOperation(randomSeverRegion, randomSeverRegion, 2, true); @@ -367,7 +362,7 @@ public ServerName findDestinationServerName(ServerName sourceServerName) throws public void regionIsolationOperation(ServerName sourceServerName, ServerName destinationServerName, int numRegionsToIsolate, boolean isolateMetaAlso) throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); HRegionServer sourceRS = cluster.getRegionServer(sourceServerName); @@ -393,10 +388,10 @@ public void regionIsolationOperation(ServerName sourceServerName, LOG.debug("Unloading {} except regions: {}", destinationRS.getServerName(), listOfRegionIDsToIsolate); rm.isolateRegions(); - Assert.assertEquals(numRegionsToIsolate, destinationRS.getNumberOfOnlineRegions()); + Assertions.assertEquals(numRegionsToIsolate, destinationRS.getNumberOfOnlineRegions()); List onlineRegions = 
destinationRS.getRegions(); for (int i = 0; i < numRegionsToIsolate; i++) { - Assert.assertTrue( + Assertions.assertTrue( listOfRegionIDsToIsolate.contains(onlineRegions.get(i).getRegionInfo().getEncodedName())); } LOG.debug("Successfully Isolated {} regions: {} on {}", listOfRegionIDsToIsolate.size(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover3.java index b1395c07f827..afbfeb7fcba2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover3.java @@ -21,7 +21,6 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; @@ -37,32 +36,24 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ MiscTests.class, LargeTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestRegionMover3 { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestRegionMover3.class); - - @Rule - public TestName name = new TestName(); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static ServerName rs0; private static ServerName rs1; private static ServerName rs2; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(3); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); @@ -72,14 +63,14 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.getAdmin().balancerSwitch(false, true); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam1")).build(); int startKey = 0; @@ -88,8 +79,8 @@ public void setUp() throws Exception { } @Test - public void testRegionUnloadWithRack() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testRegionUnloadWithRack(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -107,9 +98,9 @@ public void testRegionUnloadWithRack() throws Exception { int numRegions1 = hRegionServer1.getNumberOfOnlineRegions(); int numRegions2 = hRegionServer2.getNumberOfOnlineRegions(); - Assert.assertTrue(numRegions0 >= 3); - Assert.assertTrue(numRegions1 >= 3); - Assert.assertTrue(numRegions2 >= 3); + 
Assertions.assertTrue(numRegions0 >= 3); + Assertions.assertTrue(numRegions1 >= 3); + Assertions.assertTrue(numRegions2 >= 3); int totalRegions = numRegions0 + numRegions1 + numRegions2; // source RS: rs0 @@ -136,8 +127,8 @@ public void testRegionUnloadWithRack() throws Exception { int newNumRegions0 = hRegionServer0.getNumberOfOnlineRegions(); int newNumRegions1 = hRegionServer1.getNumberOfOnlineRegions(); int newNumRegions2 = hRegionServer2.getNumberOfOnlineRegions(); - Assert.assertEquals(0, newNumRegions1); - Assert.assertEquals(totalRegions, newNumRegions0 + newNumRegions2); + Assertions.assertEquals(0, newNumRegions1); + Assertions.assertEquals(totalRegions, newNumRegions0 + newNumRegions2); } // use custom rackManager, which resolves "rack-1" for rs0 and rs1, @@ -150,9 +141,9 @@ public void testRegionUnloadWithRack() throws Exception { int newNumRegions0 = hRegionServer0.getNumberOfOnlineRegions(); int newNumRegions1 = hRegionServer1.getNumberOfOnlineRegions(); int newNumRegions2 = hRegionServer2.getNumberOfOnlineRegions(); - Assert.assertEquals(0, newNumRegions0); - Assert.assertEquals(0, newNumRegions1); - Assert.assertEquals(totalRegions, newNumRegions2); + Assertions.assertEquals(0, newNumRegions0); + Assertions.assertEquals(0, newNumRegions1); + Assertions.assertEquals(totalRegions, newNumRegions2); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverUseIp.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverUseIp.java index 6051097f6803..889eac45262f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverUseIp.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverUseIp.java @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -37,35 +36,30 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MiscTests.class, LargeTests.class }) +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestRegionMoverUseIp { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionMoverUseIp.class); private static final Logger LOG = LoggerFactory.getLogger(TestRegionMoverUseIp.class); - @Rule - public TestName name = new TestName(); + private String testMethodName; private static HBaseTestingUtil TEST_UTIL; private static ServerName rs0; private static ServerName rs1; private static ServerName rs2; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.setBoolean(HConstants.HBASE_SERVER_USEIP_ENABLED_KEY, true); @@ -82,14 +76,15 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.getAdmin().balancerSwitch(false, true); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - final TableName tableName = 
TableName.valueOf(name.getMethodName()); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + testMethodName = testInfo.getTestMethod().get().getName(); + final TableName tableName = TableName.valueOf(testMethodName); TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("fam1")).build(); int startKey = 0; @@ -99,7 +94,7 @@ public void setUp() throws Exception { @Test public void testRegionUnloadUesIp() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); Admin admin = TEST_UTIL.getAdmin(); Table table = TEST_UTIL.getConnection().getTable(tableName); @@ -117,9 +112,9 @@ public void testRegionUnloadUesIp() throws Exception { int numRegions1 = hRegionServer1.getNumberOfOnlineRegions(); int numRegions2 = hRegionServer2.getNumberOfOnlineRegions(); - Assert.assertTrue(numRegions0 >= 3); - Assert.assertTrue(numRegions1 >= 3); - Assert.assertTrue(numRegions2 >= 3); + Assertions.assertTrue(numRegions0 >= 3); + Assertions.assertTrue(numRegions1 >= 3); + Assertions.assertTrue(numRegions2 >= 3); int totalRegions = numRegions0 + numRegions1 + numRegions2; // source RS: rs0 @@ -132,8 +127,8 @@ public void testRegionUnloadUesIp() throws Exception { int newNumRegions0 = hRegionServer0.getNumberOfOnlineRegions(); int newNumRegions1 = hRegionServer1.getNumberOfOnlineRegions(); int newNumRegions2 = hRegionServer2.getNumberOfOnlineRegions(); - Assert.assertEquals(0, newNumRegions0); - Assert.assertEquals(totalRegions, newNumRegions1 + newNumRegions2); + Assertions.assertEquals(0, newNumRegions0); + Assertions.assertEquals(totalRegions, newNumRegions1 + newNumRegions2); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverWithRSGroupEnable.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverWithRSGroupEnable.java index 56e103aa612e..3d261da92789 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverWithRSGroupEnable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMoverWithRSGroupEnable.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -39,12 +38,11 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.RegionMover.RegionMoverBuilder; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,31 +50,28 @@ * Test for rsgroup enable, unloaded regions from decommissoned host of a rsgroup should be assigned * to those regionservers belonging to the same rsgroup. 
*/ -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestRegionMoverWithRSGroupEnable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionMoverWithRSGroupEnable.class); - private static final Logger LOG = LoggerFactory.getLogger(TestRegionMoverWithRSGroupEnable.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final String TEST_RSGROUP = "test"; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { RSGroupUtil.enableRSGroup(TEST_UTIL.getConfiguration()); TEST_UTIL.startMiniCluster(5); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } private final List
rsservers = new ArrayList<>(2); - @Before + @BeforeEach public void setUp() throws Exception { Admin admin = TEST_UTIL.getAdmin(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java index da5b2bae75f7..f3f68776d84d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Collection; @@ -26,26 +26,21 @@ import java.util.List; import java.util.SortedSet; import java.util.UUID; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.ComparisonChain; import org.apache.hbase.thirdparty.com.google.common.collect.Multimap; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestRegionSplitCalculator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionSplitCalculator.class); - private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class); public static final HBaseTestingUtil TEST_UTIL = new 
HBaseTestingUtil(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java index 4f7d7299e54b..fad196ae3a5b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java @@ -17,18 +17,17 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.ArrayUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; @@ -41,13 +40,11 @@ import org.apache.hadoop.hbase.util.RegionSplitter.HexStringSplit; import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm; import org.apache.hadoop.hbase.util.RegionSplitter.UniformSplit; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,27 +52,21 @@ * Tests for {@link RegionSplitter}, which can create a pre-split table or do a rolling split of an * existing table. */ -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestRegionSplitter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionSplitter.class); - private final static Logger LOG = LoggerFactory.getLogger(TestRegionSplitter.class); private final static HBaseTestingUtil UTIL = new HBaseTestingUtil(); private final static String CF_NAME = "SPLIT_TEST_CF"; private final static byte xFF = (byte) 0xff; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setup() throws Exception { UTIL.startMiniCluster(2); } - @AfterClass + @AfterAll public static void teardown() throws Exception { UTIL.shutdownMiniCluster(); } @@ -84,7 +75,7 @@ public static void teardown() throws Exception { * Test creating a pre-split table using the HexStringSplit algorithm. */ @Test - public void testCreatePresplitTableHex() throws Exception { + public void testCreatePresplitTableHex(TestInfo testInfo) throws Exception { final List expectedBounds = new ArrayList<>(17); expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY); expectedBounds.add(Bytes.toBytes("10000000")); @@ -106,7 +97,7 @@ public void testCreatePresplitTableHex() throws Exception { // Do table creation/pre-splitting and verification of region boundaries preSplitTableAndVerify(expectedBounds, HexStringSplit.class.getSimpleName(), - TableName.valueOf(name.getMethodName())); + TableName.valueOf(testInfo.getTestMethod().get().getName())); } /** @@ -142,7 +133,7 @@ private void testSplitPresplitTable(RegionSplitter.NumberStringSplit splitter) t * Test creating a pre-split table using the UniformSplit algorithm. 
*/ @Test - public void testCreatePresplitTableUniform() throws Exception { + public void testCreatePresplitTableUniform(TestInfo testInfo) throws Exception { List expectedBounds = new ArrayList<>(17); expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY); expectedBounds.add(new byte[] { 0x10, 0, 0, 0, 0, 0, 0, 0 }); @@ -164,7 +155,7 @@ public void testCreatePresplitTableUniform() throws Exception { // Do table creation/pre-splitting and verification of region boundaries preSplitTableAndVerify(expectedBounds, UniformSplit.class.getSimpleName(), - TableName.valueOf(name.getMethodName())); + TableName.valueOf(testInfo.getTestMethod().get().getName())); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java index cc59bfead350..5beb894ec467 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java @@ -17,30 +17,25 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test requirement that root directory must be a URI */ -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestRootPath { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRootPath.class); - 
private static final Logger LOG = LoggerFactory.getLogger(TestRootPath.class); /** The test */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java index e26477bac00b..4397e39f931b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java @@ -17,28 +17,27 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.ListIterator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestSortedList { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSortedList.class); - static class StringComparator implements Comparator { @Override public int compare(String o1, String o2) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java index a6ab532b7749..0f18bdbba8d9 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestStealJobQueue.java @@ -17,34 +17,29 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestStealJobQueue { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestStealJobQueue.class); - StealJobQueue stealJobQueue; BlockingQueue stealFromQueue; - @Before + @BeforeEach public void setup() { stealJobQueue = new StealJobQueue<>(Integer::compare); stealFromQueue = stealJobQueue.getStealFromQueue(); @@ -59,8 +54,8 @@ public void testTake() throws InterruptedException { stealJobQueue.offer(4); assertEquals(3, stealJobQueue.take().intValue()); assertEquals(4, stealJobQueue.take().intValue()); - assertEquals("always take from the main queue before trying to steal", 15, - stealJobQueue.take().intValue()); + assertEquals(15, 
stealJobQueue.take().intValue(), + "always take from the main queue before trying to steal"); assertEquals(10, stealJobQueue.take().intValue()); assertTrue(stealFromQueue.isEmpty()); assertTrue(stealJobQueue.isEmpty()); @@ -116,8 +111,8 @@ public void testPoll() throws InterruptedException { stealJobQueue.offer(4); assertEquals(3, stealJobQueue.poll(1, TimeUnit.SECONDS).intValue()); assertEquals(4, stealJobQueue.poll(1, TimeUnit.SECONDS).intValue()); - assertEquals("always take from the main queue before trying to steal", 15, - stealJobQueue.poll(1, TimeUnit.SECONDS).intValue()); + assertEquals(15, stealJobQueue.poll(1, TimeUnit.SECONDS).intValue(), + "always take from the main queue before trying to steal"); assertEquals(10, stealJobQueue.poll(1, TimeUnit.SECONDS).intValue()); assertTrue(stealFromQueue.isEmpty()); assertTrue(stealJobQueue.isEmpty()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableDescriptorChecker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableDescriptorChecker.java index cf68888a2c77..fcc2ab4ce8fd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableDescriptorChecker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableDescriptorChecker.java @@ -17,27 +17,23 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.conf.ConfigKey; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import 
org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestTableDescriptorChecker { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTableDescriptorChecker.class); @Test public void testSanityCheck() throws IOException { @@ -55,8 +51,9 @@ public void testSanityCheck() throws IOException { // Error in table configuration. t.setValue(key, "xx"); - assertThrows("Should have thrown IllegalArgumentException", DoNotRetryIOException.class, - () -> TableDescriptorChecker.sanityCheck(conf, t.build())); + assertThrows(DoNotRetryIOException.class, + () -> TableDescriptorChecker.sanityCheck(conf, t.build()), + "Should have thrown IllegalArgumentException"); // Fix the error. t.setValue(key, "1"); @@ -72,8 +69,9 @@ public void testSanityCheck() throws IOException { } t.removeColumnFamily("cf".getBytes()); t.setColumnFamily(cf.build()); - assertThrows("Should have thrown IllegalArgumentException", DoNotRetryIOException.class, - () -> TableDescriptorChecker.sanityCheck(conf, t.build())); + assertThrows(DoNotRetryIOException.class, + () -> TableDescriptorChecker.sanityCheck(conf, t.build()), + "Should have thrown IllegalArgumentException"); // Fix the error. 
if (viaSetValue) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestZKStringFormatter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestZKStringFormatter.java index 9621344e072e..3c757d061f20 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestZKStringFormatter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestZKStringFormatter.java @@ -17,29 +17,24 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestZKStringFormatter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZKStringFormatter.class); - private ZKWatcher zkWatcher; - @Before + @BeforeEach public void setUp() throws Exception { zkWatcher = mock(ZKWatcher.class); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java index 1d48127f7d0a..045834d438c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java @@ -17,9 
+17,9 @@ */ package org.apache.hadoop.hbase.util.compaction; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.isA; import static org.mockito.Mockito.doReturn; @@ -36,7 +36,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; @@ -49,27 +48,23 @@ import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerForTest; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestMajorCompactionRequest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMajorCompactionRequest.class); protected static final HBaseTestingUtil UTILITY = new HBaseTestingUtil(); protected static final String FAMILY = "a"; protected Path rootRegionDir; protected Path regionStoreDir; - @Before + @BeforeEach public void setUp() throws Exception { rootRegionDir = 
UTILITY.getDataTestDirOnTestFS("TestMajorCompactionRequest"); regionStoreDir = new Path(rootRegionDir, FAMILY); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionTTLRequest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionTTLRequest.java index b3fab4c0b5c1..5f5516a03872 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionTTLRequest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionTTLRequest.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.util.compaction; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -29,7 +29,6 @@ import java.util.List; import java.util.Optional; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.RegionInfo; @@ -37,21 +36,17 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerForTest; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) public class TestMajorCompactionTTLRequest 
extends TestMajorCompactionRequest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMajorCompactionTTLRequest.class); - @Before + @BeforeEach @Override public void setUp() throws Exception { rootRegionDir = UTILITY.getDataTestDirOnTestFS("TestMajorCompactionTTLRequest"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactor.java index f2b7e783f737..0faf345d57b2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactor.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.hbase.util.compaction; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -30,39 +29,37 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestMajorCompactor { - @ClassRule - public static final 
HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMajorCompactor.class); public static final byte[] FAMILY = Bytes.toBytes("a"); protected HBaseTestingUtil utility; protected Admin admin; - @Before + @BeforeEach public void setUp() throws Exception { utility = new HBaseTestingUtil(); utility.getConfiguration().setInt("hbase.hfile.compaction.discharger.interval", 10); utility.startMiniCluster(); } - @After + @AfterEach public void tearDown() throws Exception { utility.shutdownMiniCluster(); } @Test - public void testCompactingATable() throws Exception { - TableName tableName = TableName.valueOf("TestMajorCompactor"); + public void testCompactingATable(TestInfo testInfo) throws Exception { + TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); utility.createMultiRegionTable(tableName, FAMILY, 5); utility.waitTableAvailable(tableName); Connection connection = utility.getConnection(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactorTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactorTTL.java index e93375b5b7b8..1d0dfb3ba58d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactorTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactorTTL.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.util.compaction; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -31,24 +30,17 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import 
org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) public class TestMajorCompactorTTL extends TestMajorCompactor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMajorCompactorTTL.class); - @Rule - public TestName name = new TestName(); - - @Before + @BeforeEach @Override public void setUp() throws Exception { utility = new HBaseTestingUtil(); @@ -57,15 +49,15 @@ public void setUp() throws Exception { admin = utility.getAdmin(); } - @After + @AfterEach @Override public void tearDown() throws Exception { utility.shutdownMiniCluster(); } @Test - public void testCompactingATable() throws Exception { - TableName tableName = createTable(name.getMethodName()); + public void testCompactingATable(TestInfo testInfo) throws Exception { + TableName tableName = createTable(testInfo.getTestMethod().get().getName()); // Delay a bit, so we can set the table TTL to 5 seconds Thread.sleep(10 * 1000); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/CompressedWALTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/CompressedWALTestBase.java index 93714111d8da..9329203126db 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/CompressedWALTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/CompressedWALTestBase.java @@ -19,8 +19,8 @@ import static org.hamcrest.MatcherAssert.assertThat; import static 
org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.List; @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Test; +import org.junit.jupiter.api.TestTemplate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,12 +71,12 @@ public abstract class CompressedWALTestBase { Arrays.fill(VALUE, off, (off += 1597), (byte) 'Q'); } - @Test + @TestTemplate public void test() throws Exception { testForSize(1000); } - @Test + @TestTemplate public void testLarge() throws Exception { testForSize(1024 * 1024); } @@ -132,21 +132,21 @@ public void doTest(TableName tableName, int valueSize) throws Exception { assertThat("Should be two KVs per WALEdit", cells, hasSize(2)); Cell putCell = cells.get(0); assertEquals(Cell.Type.Put, putCell.getType()); - assertTrue("Incorrect row", Bytes.equals(putCell.getRowArray(), putCell.getRowOffset(), - putCell.getRowLength(), row, 0, row.length)); - assertTrue("Incorrect family", Bytes.equals(putCell.getFamilyArray(), - putCell.getFamilyOffset(), putCell.getFamilyLength(), family, 0, family.length)); - assertTrue("Incorrect value", Bytes.equals(putCell.getValueArray(), - putCell.getValueOffset(), putCell.getValueLength(), value, 0, value.length)); + assertTrue(Bytes.equals(putCell.getRowArray(), putCell.getRowOffset(), + putCell.getRowLength(), row, 0, row.length), "Incorrect row"); + assertTrue(Bytes.equals(putCell.getFamilyArray(), putCell.getFamilyOffset(), + putCell.getFamilyLength(), family, 0, family.length), "Incorrect family"); + assertTrue(Bytes.equals(putCell.getValueArray(), putCell.getValueOffset(), + putCell.getValueLength(), value, 0, 
value.length), "Incorrect value"); Cell deleteCell = cells.get(1); assertEquals(Cell.Type.DeleteFamily, deleteCell.getType()); - assertTrue("Incorrect row", Bytes.equals(deleteCell.getRowArray(), - deleteCell.getRowOffset(), deleteCell.getRowLength(), row, 0, row.length)); - assertTrue("Incorrect family", Bytes.equals(deleteCell.getFamilyArray(), - deleteCell.getFamilyOffset(), deleteCell.getFamilyLength(), family, 0, family.length)); + assertTrue(Bytes.equals(deleteCell.getRowArray(), deleteCell.getRowOffset(), + deleteCell.getRowLength(), row, 0, row.length), "Incorrect row"); + assertTrue(Bytes.equals(deleteCell.getFamilyArray(), deleteCell.getFamilyOffset(), + deleteCell.getFamilyLength(), family, 0, family.length), "Incorrect family"); } - assertEquals("Should have read back as many KVs as written", total, count); + assertEquals(total, count, "Should have read back as many KVs as written"); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestAsyncFSWALCorruptionDueToDanglingByteBuffer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestAsyncFSWALCorruptionDueToDanglingByteBuffer.java index 0d44ead064dd..8fa3cb4f680e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestAsyncFSWALCorruptionDueToDanglingByteBuffer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestAsyncFSWALCorruptionDueToDanglingByteBuffer.java @@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor; import org.apache.hadoop.hbase.regionserver.wal.AsyncFSWAL; import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; @@ -31,10 +30,9 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; -import 
org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; import org.apache.hbase.thirdparty.io.netty.channel.Channel; import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup; @@ -42,14 +40,11 @@ /** * Testcase for HBASE-22539 */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestAsyncFSWALCorruptionDueToDanglingByteBuffer extends WALCorruptionDueToDanglingByteBufferTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestAsyncFSWALCorruptionDueToDanglingByteBuffer.class); - public static final class PauseWAL extends AsyncFSWAL { public PauseWAL(FileSystem fs, Path rootDir, String logDir, String archiveDir, @@ -97,7 +92,7 @@ protected void doInit(Configuration conf) throws IOException { } } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.getConfiguration().setClass(WALFactory.WAL_PROVIDER, PauseWALProvider.class, WALProvider.class); @@ -106,7 +101,7 @@ public static void setUp() throws Exception { UTIL.waitTableAvailable(TABLE_NAME); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWAL.java index faf244b5db79..4bce2a458f92 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWAL.java @@ -17,44 +17,39 @@ */ package org.apache.hadoop.hbase.wal; -import java.util.Arrays; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import 
org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: provider={0}") public class TestCompressedWAL extends CompressedWALTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCompressedWAL.class); - - @Parameter public String walProvider; - @Parameters(name = "{index}: provider={0}") - public static Iterable data() { - return Arrays.asList(new Object[] { "defaultProvider" }, new Object[] { "asyncfs" }); + public TestCompressedWAL(String walProvider) { + this.walProvider = walProvider; + } + + public static Stream parameters() { + return Stream.of(Arguments.of("defaultProvider"), Arguments.of("asyncfs")); } - @Before + @BeforeEach public void setUp() throws Exception { TEST_UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, walProvider); TEST_UTIL.getConfiguration().setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true); TEST_UTIL.startMiniDFSCluster(3); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWALValueCompression.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWALValueCompression.java index 598fd9f7a9ed..10b33e858d9e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWALValueCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestCompressedWALValueCompression.java @@ -17,42 +17,35 @@ */ package org.apache.hadoop.hbase.wal; -import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.regionserver.wal.CompressionContext; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: compression={0}") public class TestCompressedWALValueCompression extends CompressedWALTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCompressedWALValueCompression.class); - - @Parameters(name = "{index}: compression={0}") - public static List params() { - return HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS_PARAMETERIZED; - } - private final Compression.Algorithm compression; public 
TestCompressedWALValueCompression(Compression.Algorithm algo) { this.compression = algo; } - @Before + public static Stream parameters() { + return HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS_PARAMETERIZED.stream().map(Arguments::of); + } + + @BeforeEach public void setUp() throws Exception { TEST_UTIL.getConfiguration().setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true); TEST_UTIL.getConfiguration().setBoolean(CompressionContext.ENABLE_WAL_VALUE_COMPRESSION, true); @@ -61,7 +54,7 @@ public void setUp() throws Exception { TEST_UTIL.startMiniDFSCluster(3); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java index 54eb867c2ccb..2b4c82635fa4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; @@ -32,35 +31,27 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; 
-import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestDisabledWAL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDisabledWAL.class); - - @Rule - public TestName name = new TestName(); - private static final Logger LOG = LoggerFactory.getLogger(TestDisabledWAL.class); static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private Table table; private TableName tableName; private byte[] fam = Bytes.toBytes("f1"); - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setBoolean("hbase.regionserver.hlog.enabled", false); @@ -72,19 +63,20 @@ public static void beforeClass() throws Exception { } } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setup() throws Exception { - tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_")); + @BeforeEach + public void setup(TestInfo testInfo) throws Exception { + tableName = + TableName.valueOf(testInfo.getTestMethod().get().getName().replaceAll("[^a-zA-Z0-9]", "_")); LOG.info("Creating table " + tableName); table = TEST_UTIL.createTable(tableName, fam); } - @After + @AfterEach public void cleanup() throws Exception { LOG.info("Deleting table " + tableName); TEST_UTIL.deleteTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionDueToDanglingByteBuffer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionDueToDanglingByteBuffer.java index 1d9c870276ef..5cf0b6c3f299 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionDueToDanglingByteBuffer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionDueToDanglingByteBuffer.java @@ -22,28 +22,23 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.regionserver.wal.FSHLog; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Testcase for HBASE-22539 */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestFSHLogCorruptionDueToDanglingByteBuffer extends WALCorruptionDueToDanglingByteBufferTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSHLogCorruptionDueToDanglingByteBuffer.class); - public static final class PauseWAL extends FSHLog { public PauseWAL(FileSystem fs, Path rootDir, String logDir, String archiveDir, @@ -79,7 +74,7 @@ protected void doInit(Configuration conf) throws IOException { } } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.getConfiguration().setClass(WALFactory.WAL_PROVIDER, PauseWALProvider.class, WALProvider.class); @@ -88,7 +83,7 @@ public static void setUp() throws Exception { 
UTIL.waitTableAvailable(TABLE_NAME); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer.java index b971f456ddb3..a9572b1602a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer.java @@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.ipc.RpcServerFactory; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; @@ -32,19 +31,15 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer extends WALCorruptionWithMultiPutDueToDanglingByteBufferTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSHLogCorruptionWithMultiPutDueToDanglingByteBuffer.class); - public static final class PauseWAL extends FSHLog { private int testTableWalAppendsCount = 0; @@ -97,7 
+92,7 @@ protected void doInit(Configuration conf) throws IOException { } } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.getConfiguration().setClass(WALFactory.WAL_PROVIDER, PauseWALProvider.class, WALProvider.class); @@ -112,7 +107,7 @@ public static void setUp() throws Exception { UTIL.waitTableAvailable(TABLE_NAME); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java index ea1dfed4f1ca..4947b9e13d29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.HashSet; @@ -32,7 +32,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -49,24 +48,19 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import 
org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestFSHLogProvider { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSHLogProvider.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFSHLogProvider.class); private static Configuration conf; @@ -74,11 +68,11 @@ public class TestFSHLogProvider { private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private MultiVersionConcurrencyControl mvcc; - @Rule - public final TestName currentTest = new TestName(); + private String currentTestName; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + currentTestName = testInfo.getTestMethod().get().getName(); mvcc = new MultiVersionConcurrencyControl(); FileStatus[] entries = fs.listStatus(new Path("/")); for (FileStatus dir : entries) { @@ -86,7 +80,7 @@ public void setUp() throws Exception { } } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // Make block sizes small. 
TEST_UTIL.getConfiguration().setInt("dfs.blocksize", 1024 * 1024); @@ -107,7 +101,7 @@ public static void setUpBeforeClass() throws Exception { fs = TEST_UTIL.getDFSCluster().getFileSystem(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -132,15 +126,15 @@ public void testGetServerNameFromWALDirectoryName() throws IOException { ServerName parsed = AbstractFSWALProvider.getServerNameFromWALDirectoryName(conf, CommonFSUtils.getRootDir(conf).toUri().toString() + wals + sn + "/localhost%2C32984%2C1343316388997.1343316390417"); - assertEquals("standard", sn, parsed); + assertEquals(sn, parsed, "standard"); parsed = AbstractFSWALProvider.getServerNameFromWALDirectoryName(conf, hl + "/qdf"); - assertEquals("subdir", sn, parsed); + assertEquals(sn, parsed, "subdir"); parsed = AbstractFSWALProvider.getServerNameFromWALDirectoryName(conf, CommonFSUtils.getRootDir(conf).toUri().toString() + wals + sn + "-splitting/localhost%3A57020.1340474893931"); - assertEquals("split", sn, parsed); + assertEquals(sn, parsed, "split"); } private void addEdits(WAL log, RegionInfo hri, TableDescriptor htd, int times, @@ -180,11 +174,10 @@ private void waitNumRolledLogFiles(WAL wal, int expected) { } private void testLogCleaning(WALFactory wals) throws IOException { - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); TableDescriptor htd2 = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "2")) + TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName + "2")) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); NavigableMap scopes1 = new TreeMap<>(Bytes.BYTES_COMPARATOR); 
for (byte[] fam : htd.getColumnFamilyNames()) { @@ -238,10 +231,10 @@ private void testLogCleaning(WALFactory wals) throws IOException { @Test public void testLogCleaning() throws Exception { - LOG.info(currentTest.getMethodName()); + LOG.info(currentTestName); Configuration localConf = new Configuration(conf); localConf.set(WALFactory.WAL_PROVIDER, FSHLogProvider.class.getName()); - WALFactory wals = new WALFactory(localConf, currentTest.getMethodName()); + WALFactory wals = new WALFactory(localConf, currentTestName); try { testLogCleaning(wals); } finally { @@ -251,10 +244,10 @@ public void testLogCleaning() throws Exception { private void testWALArchiving(WALFactory wals) throws IOException { TableDescriptor table1 = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "1")) + TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName + "1")) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); TableDescriptor table2 = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName() + "2")) + TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName + "2")) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build(); NavigableMap scopes1 = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (byte[] fam : table1.getColumnFamilyNames()) { @@ -322,11 +315,11 @@ private void testWALArchiving(WALFactory wals) throws IOException { */ @Test public void testWALArchiving() throws IOException { - LOG.debug(currentTest.getMethodName()); + LOG.debug(currentTestName); Configuration localConf = new Configuration(conf); localConf.set(WALFactory.WAL_PROVIDER, FSHLogProvider.class.getName()); - WALFactory wals = new WALFactory(localConf, currentTest.getMethodName()); + WALFactory wals = new WALFactory(localConf, currentTestName); try { testWALArchiving(wals); } finally { @@ -341,18 +334,18 @@ public void testWALArchiving() throws IOException { public void setMembershipDedups() throws IOException { 
Configuration localConf = new Configuration(conf); localConf.set(WALFactory.WAL_PROVIDER, FSHLogProvider.class.getName()); - WALFactory wals = new WALFactory(localConf, currentTest.getMethodName()); + WALFactory wals = new WALFactory(localConf, currentTestName); try { final Set<WAL> seen = new HashSet<>(1); - assertTrue("first attempt to add WAL from default provider should work.", - seen.add(wals.getWAL(null))); + assertTrue(seen.add(wals.getWAL(null)), + "first attempt to add WAL from default provider should work."); for (int i = 0; i < 1000; i++) { assertFalse( - "default wal provider is only supposed to return a single wal, which should " - + "compare as .equals itself.", seen.add(wals.getWAL(RegionInfoBuilder .newBuilder(TableName.valueOf("Table-" + ThreadLocalRandom.current().nextInt())) - .build()))); + .build())), + "default wal provider is only supposed to return a single wal, which should " + + "compare as .equals itself."); } } finally { wals.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestOutputSinkWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestOutputSinkWriter.java index fa4085e45b90..aa4a9bcc2038 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestOutputSinkWriter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestOutputSinkWriter.java @@ -17,25 +17,22 @@ */ package org.apache.hadoop.hbase.wal; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.IOException; import java.util.List; import java.util.Map; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; 
+import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestOutputSinkWriter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestOutputSinkWriter.class); - @Test public void testExeptionHandling() throws IOException, InterruptedException { WALSplitter.PipelineController controller = new WALSplitter.PipelineController(); @@ -80,9 +77,9 @@ public boolean keepRegionEvent(WAL.Entry entry) { // make sure the exception is stored try { controller.checkForErrors(); - Assert.fail(); + fail(); } catch (RuntimeException re) { - Assert.assertTrue(true); + assertTrue(true); } sink.restartWriterThreadsIfNeeded(); @@ -91,7 +88,7 @@ public boolean keepRegionEvent(WAL.Entry entry) { try { controller.checkForErrors(); } catch (RuntimeException re) { - Assert.fail(); + fail(); } // prep another exception and wait for it to be thrown @@ -101,9 +98,9 @@ public boolean keepRegionEvent(WAL.Entry entry) { // make sure the exception is stored try { controller.checkForErrors(); - Assert.fail(); + fail(); } catch (RuntimeException re) { - Assert.assertTrue(true); + assertTrue(true); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestParsePartialWALFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestParsePartialWALFile.java index dc267058183e..162b9c4dfc77 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestParsePartialWALFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestParsePartialWALFile.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertThrows; import java.io.EOFException; import java.io.IOException; @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -46,11 +45,10 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * In this test, we write a small WAL file first, and then generate partial WAL file which length is @@ -66,13 +64,10 @@ * replication will be stuck. 
* */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestParsePartialWALFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestParsePartialWALFile.class); - private static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil(); private static FileSystem FS; @@ -84,7 +79,7 @@ public class TestParsePartialWALFile { private static byte[] QUAL = Bytes.toBytes("qualifier"); private static byte[] VALUE = Bytes.toBytes("value"); - @BeforeClass + @BeforeAll public static void setUp() throws IOException { UTIL.getConfiguration().setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, false); FS = FileSystem.getLocal(UTIL.getConfiguration()); @@ -93,7 +88,7 @@ public static void setUp() throws IOException { } } - @AfterClass + @AfterAll public static void tearDown() { UTIL.cleanupTestDir(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRaceBetweenGetWALAndGetWALs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRaceBetweenGetWALAndGetWALs.java index 86f98890e32f..7b183bd4ff20 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRaceBetweenGetWALAndGetWALs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRaceBetweenGetWALAndGetWALs.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertSame; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; import java.io.IOException; import java.util.List; @@ -26,14 +26,12 @@ import java.util.concurrent.ForkJoinPool; import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL; import 
org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Threads; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; @@ -41,13 +39,10 @@ /** * Testcase for HBASE-21503. */ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestRaceBetweenGetWALAndGetWALs { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRaceBetweenGetWALAndGetWALs.class); - private static Future<List<WAL>> GET_WALS_FUTURE; private static final class FSWALProvider extends AbstractFSWALProvider<AbstractFSWAL<?>> { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestReadWriteSeqIdFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestReadWriteSeqIdFiles.java index c08cddb0fd44..fa7e96c88778 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestReadWriteSeqIdFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestReadWriteSeqIdFiles.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.NavigableSet; @@ -25,26 +25,21 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.AfterClass; -import 
org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestReadWriteSeqIdFiles { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadWriteSeqIdFiles.class); - private static final Logger LOG = LoggerFactory.getLogger(TestReadWriteSeqIdFiles.class); private static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil(); @@ -53,13 +48,13 @@ public class TestReadWriteSeqIdFiles { private static Path REGION_DIR; - @BeforeClass + @BeforeAll public static void setUp() throws IOException { walFS = FileSystem.getLocal(UTIL.getConfiguration()); REGION_DIR = UTIL.getDataTestDir(); } - @AfterClass + @AfterAll public static void tearDown() throws IOException { UTIL.cleanupTestDir(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRecoveredEditsOutputSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRecoveredEditsOutputSink.java index 06ed79a04a72..42f2116a7e48 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRecoveredEditsOutputSink.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestRecoveredEditsOutputSink.java @@ -17,32 +17,27 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.IOException; import java.io.InterruptedIOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestRecoveredEditsOutputSink { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRecoveredEditsOutputSink.class); - private static WALFactory wals; private static FileSystem fs; private static Path rootDir; @@ -50,7 +45,7 @@ public class TestRecoveredEditsOutputSink { private static RecoveredEditsOutputSink outputSink; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.set(WALFactory.WAL_PROVIDER, "filesystem"); @@ -63,7 +58,7 @@ public static void setUpBeforeClass() throws Exception { outputSink = new RecoveredEditsOutputSink(splitter, pipelineController, sink, 3); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { wals.close(); fs.delete(rootDir, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java index 3a27a2f4e0e2..0c0c8b122e4a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java @@ -17,21 +17,21 @@ */ package org.apache.hadoop.hbase.wal; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.util.Arrays; import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; +import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -45,41 +45,34 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) 
+@HBaseParameterizedTestTemplate public class TestSecureWAL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureWAL.class); - static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Rule - public TestName name = new TestName(); + private String testMethodName; - @Parameter public String walProvider; - @Parameters(name = "{index}: provider={0}") - public static Iterable data() { - return Arrays.asList(new Object[] { "defaultProvider" }, new Object[] { "asyncfs" }); + public TestSecureWAL(String walProvider) { + this.walProvider = walProvider; + } + + public static Stream parameters() { + return Stream.of(Arguments.of("defaultProvider"), Arguments.of("asyncfs")); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName()); @@ -89,19 +82,20 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniDFSCluster(3); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before - public void setUp() { + @BeforeEach + public void setUp(TestInfo testInfo) { + testMethodName = testInfo.getTestMethod().get().getName(); TEST_UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, walProvider); } - @Test + @TestTemplate public void testSecureWAL() throws Exception { - TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_")); + TableName tableName = TableName.valueOf(testMethodName.replaceAll("[^a-zA-Z0-9]", "_")); NavigableMap scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR); scopes.put(tableName.getName(), 0); RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).build(); @@ -134,7 +128,7 @@ public void testSecureWAL() throws Exception { byte[] fileData = new byte[(int) length]; IOUtils.readFully(in, fileData); 
in.close(); - assertFalse("Cells appear to be plaintext", Bytes.contains(fileData, value)); + assertFalse(Bytes.contains(fileData, value), "Cells appear to be plaintext"); // Confirm the WAL can be read back int count = 0; @@ -143,17 +137,17 @@ public void testSecureWAL() throws Exception { while (reader.next(entry) != null) { count++; List cells = entry.getEdit().getCells(); - assertTrue("Should be one KV per WALEdit", cells.size() == 1); + assertTrue(cells.size() == 1, "Should be one KV per WALEdit"); for (Cell cell : cells) { - assertTrue("Incorrect row", Bytes.equals(cell.getRowArray(), cell.getRowOffset(), - cell.getRowLength(), row, 0, row.length)); - assertTrue("Incorrect family", Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(), - cell.getFamilyLength(), family, 0, family.length)); - assertTrue("Incorrect value", Bytes.equals(cell.getValueArray(), cell.getValueOffset(), - cell.getValueLength(), value, 0, value.length)); + assertTrue(Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, + 0, row.length), "Incorrect row"); + assertTrue(Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength(), family, 0, family.length), "Incorrect family"); + assertTrue(Bytes.equals(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength(), value, 0, value.length), "Incorrect value"); } } - assertEquals("Should have read back as many KVs as written", total, count); + assertEquals(total, count, "Should have read back as many KVs as written"); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java index 9558ae60246a..e79b03eeef8b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java @@ -19,13 +19,13 @@ import static org.apache.hadoop.hbase.wal.WALFactory.META_WAL_PROVIDER; 
import static org.apache.hadoop.hbase.wal.WALFactory.WAL_PROVIDER; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; @@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -77,28 +76,23 @@ import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.HdfsConstants; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * WAL tests that can be 
reused across providers. */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestWALFactory { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALFactory.class); - private static final Logger LOG = LoggerFactory.getLogger(TestWALFactory.class); protected static Configuration conf; @@ -112,18 +106,18 @@ public class TestWALFactory { protected WALFactory wals; private ServerName currentServername; - @Rule - public final TestName currentTest = new TestName(); + private String currentTestName; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + currentTestName = testInfo.getTestMethod().get().getName(); fs = cluster.getFileSystem(); - dir = new Path(hbaseDir, currentTest.getMethodName()); - this.currentServername = ServerName.valueOf(currentTest.getMethodName(), 16010, 1); + dir = new Path(hbaseDir, currentTestName); + this.currentServername = ServerName.valueOf(currentTestName, 16010, 1); wals = new WALFactory(conf, this.currentServername.toString()); } - @After + @AfterEach public void tearDown() throws Exception { // testAppendClose closes the FileSystem, which will prevent us from closing cleanly here. try { @@ -139,7 +133,7 @@ public void tearDown() throws Exception { } } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { CommonFSUtils.setWALRootDir(TEST_UTIL.getConfiguration(), new Path("file:///tmp/wal")); // Make block sizes small. 
@@ -167,7 +161,7 @@ public static void setUpBeforeClass() throws Exception { hbaseWALDir = TEST_UTIL.createWALRootDir(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -182,7 +176,7 @@ public void canCloseSingleton() throws IOException { */ @Test public void testSplit() throws IOException { - final TableName tableName = TableName.valueOf(currentTest.getMethodName()); + final TableName tableName = TableName.valueOf(currentTestName); final byte[] rowName = tableName.getName(); final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1); final int howmany = 3; @@ -238,10 +232,10 @@ public void testSplit() throws IOException { */ @Test public void Broken_testSync() throws Exception { - TableName tableName = TableName.valueOf(currentTest.getMethodName()); + TableName tableName = TableName.valueOf(currentTestName); MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1); // First verify that using streams all works. 
- Path p = new Path(dir, currentTest.getMethodName() + ".fsdos"); + Path p = new Path(dir, currentTestName + ".fsdos"); FSDataOutputStream out = fs.create(p); out.write(tableName.getName()); Method syncMethod = null; @@ -351,7 +345,7 @@ private void verifySplits(final List splits, final int howmany) throws IOE */ @Test public void testAppendClose() throws Exception { - TableName tableName = TableName.valueOf(currentTest.getMethodName()); + TableName tableName = TableName.valueOf(currentTestName); RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).build(); WAL wal = wals.getWAL(regionInfo); @@ -453,7 +447,7 @@ public void run() { WAL.Entry entry = new WAL.Entry(); while (reader.next(entry) != null) { count++; - assertTrue("Should be one KeyValue per WALEdit", entry.getEdit().getCells().size() == 1); + assertTrue(entry.getEdit().getCells().size() == 1, "Should be one KeyValue per WALEdit"); } } assertEquals(total, count); @@ -468,9 +462,8 @@ public void run() { @Test public void testEditAdd() throws IOException { int colCount = 10; - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes.put(fam, 0); @@ -520,9 +513,8 @@ public void testEditAdd() throws IOException { @Test public void testAppend() throws IOException { int colCount = 10; - TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName)) + 
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); NavigableMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); for (byte[] fam : htd.getColumnFamilyNames()) { scopes.put(fam, 0); @@ -570,7 +562,7 @@ public void testAppend() throws IOException { @Test public void testVisitors() throws Exception { final int COL_COUNT = 10; - final TableName tableName = TableName.valueOf(currentTest.getMethodName()); + final TableName tableName = TableName.valueOf(currentTestName); final byte[] row = Bytes.toBytes("row"); final DumbWALActionsListener visitor = new DumbWALActionsListener(); final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1); @@ -751,7 +743,7 @@ public FSDataInputStream open(Path p, int blockSize) throws IOException { }; final TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTest.getMethodName())) + TableDescriptorBuilder.newBuilder(TableName.valueOf(currentTestName)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("column")).build(); final RegionInfo hri = RegionInfoBuilder.newBuilder(htd.getTableName()).build(); @@ -780,14 +772,14 @@ public FSDataInputStream open(Path p, int blockSize) throws IOException { // Now open a reader on the log which will throw an exception when // we try to instantiate the custom Codec. Path filename = AbstractFSWALProvider.getCurrentFileName(log); - assertThrows("Expected to see an exception when creating WAL reader", IOException.class, - () -> customFactory.createStreamReader(proxyFs, filename)); + assertThrows(IOException.class, () -> customFactory.createStreamReader(proxyFs, filename), + "Expected to see an exception when creating WAL reader"); // We should have exactly one reader assertEquals(1, openedReaders.size()); // And that reader should be closed. 
long unclosedReaders = openedReaders.stream().filter((r) -> !r.isClosed.get()).collect(Collectors.counting()); - assertEquals("Should not find any open readers", 0, unclosedReaders); + assertEquals(0, unclosedReaders, "Should not find any open readers"); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java index f411be3f5dcb..368f35391c1f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.List; import java.util.Random; import java.util.SortedMap; import java.util.TreeMap; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; @@ -36,11 +35,10 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; @@ -49,13 +47,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest; 
-@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestWALFiltering { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALFiltering.class); - private static final int NUM_RS = 4; private static final TableName TABLE_NAME = TableName.valueOf("TestWALFiltering"); @@ -65,13 +60,13 @@ public class TestWALFiltering { private HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Before + @BeforeEach public void setUp() throws Exception { TEST_UTIL.startMiniCluster(NUM_RS); fillTable(); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java index 6a1e98d9fd5d..2470c0761c45 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.NavigableSet; @@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueTestUtil; @@ -42,20 +41,16 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.wal.WALSplitter.PipelineController; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Simple testing of a few WAL methods. */ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestWALMethods { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALMethods.class); - private static final byte[] TEST_REGION = Bytes.toBytes("test_region"); private static final TableName TEST_TABLE = TableName.valueOf("test_table"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALOpenAfterDNRollingStart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALOpenAfterDNRollingStart.java index 661d020d0e8f..69ed11d2d1c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALOpenAfterDNRollingStart.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALOpenAfterDNRollingStart.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hbase.wal; import java.io.IOException; -import java.util.Arrays; -import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.HRegionServer; @@ -29,26 +28,19 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; 
import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate public class TestWALOpenAfterDNRollingStart { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALOpenAfterDNRollingStart.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); // Sleep time before restart next dn, we need to wait the current dn to finish start up private static long DN_RESTART_INTERVAL = 15000; @@ -58,15 +50,17 @@ public class TestWALOpenAfterDNRollingStart { // so a low replication case will be detected and the wal will be rolled private static long CHECK_LOW_REPLICATION_INTERVAL = 10000; - @Parameter public String walProvider; - @Parameters(name = "{index}: wal={0}") - public static List data() { - return Arrays.asList(new Object[] { "asyncfs" }, new Object[] { "filesystem" }); + public TestWALOpenAfterDNRollingStart(String walProvider) { + this.walProvider = walProvider; + } + + public static Stream parameters() { + return Stream.of(Arguments.of("asyncfs"), Arguments.of("filesystem")); } - 
@BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // don't let hdfs client to choose a new replica when dn down TEST_UTIL.getConfiguration() @@ -77,18 +71,18 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniZKCluster(); } - @Before + @BeforeEach public void setUp() throws IOException, InterruptedException { TEST_UTIL.getConfiguration().set("hbase.wal.provider", walProvider); TEST_UTIL.startMiniHBaseCluster(); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniHBaseCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -100,7 +94,7 @@ public static void tearDownAfterClass() throws Exception { * never know all the replica of the wal is broken(because of dn restarting). And this wal can * never be open */ - @Test + @TestTemplate public void test() throws Exception { HRegionServer server = TEST_UTIL.getHBaseCluster().getRegionServer(0); AbstractFSWAL wal = (AbstractFSWAL) server.getWAL(null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALProvider.java index 68c9e461b2b8..22fbae49318e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALProvider.java @@ -17,23 +17,19 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Comparator; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; 
+import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestWALProvider { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALProvider.class); /** * Test start time comparator. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java index a3e6a79c3fb8..fa1a73582d9f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -39,22 +38,17 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestWALRootDir { - @ClassRule - public 
static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALRootDir.class); - private static final Logger LOG = LoggerFactory.getLogger(TestWALRootDir.class); private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static Configuration conf; @@ -67,12 +61,12 @@ public class TestWALRootDir { private static Path rootDir; private static WALFactory wals; - @Before + @BeforeEach public void setUp() throws Exception { cleanup(); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); TEST_UTIL.startMiniDFSCluster(1); @@ -82,7 +76,7 @@ public static void setUpBeforeClass() throws Exception { walFs = CommonFSUtils.getWALFileSystem(conf); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { cleanup(); TEST_UTIL.shutdownMiniDFSCluster(); @@ -102,7 +96,7 @@ public void testWALRootDir() throws Exception { long txid = log.appendData(regionInfo, getWalKey(EnvironmentEdgeManager.currentTime(), regionInfo, 0), edit); log.sync(txid); - assertEquals("Expect 1 log have been created", 1, getWALFiles(walFs, walRootDir).size()); + assertEquals(1, getWALFiles(walFs, walRootDir).size(), "Expect 1 log have been created"); log.rollWriter(); // Create 1 more WAL assertEquals(2, @@ -115,8 +109,8 @@ public void testWALRootDir() throws Exception { log.rollWriter(); log.shutdown(); - assertEquals("Expect 3 logs in WALs dir", 3, - getWALFiles(walFs, new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME)).size()); + assertEquals(3, getWALFiles(walFs, new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME)).size(), + "Expect 3 logs in WALs dir"); } private WALKeyImpl getWalKey(final long time, RegionInfo hri, final long startPoint) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java index cc1fefc266c4..85450521fe24 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java @@ -21,11 +21,12 @@ import static org.apache.hadoop.hbase.replication.regionserver.ReplicationMarkerChore.getRowKey; import static org.apache.hadoop.hbase.wal.WALEdit.METAFAMILY; import static org.apache.hadoop.hbase.wal.WALEdit.REPLICATION_MARKER; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeFalse; import java.io.FileNotFoundException; import java.io.IOException; @@ -54,7 +55,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -86,15 +86,13 @@ import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException; import org.apache.hadoop.ipc.RemoteException; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -113,11 +111,9 @@ /** * Testing {@link WAL} splitting code. */ -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestWALSplit { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALSplit.class); private final static Logger LOG = LoggerFactory.getLogger(TestWALSplit.class); private static Configuration conf; @@ -155,7 +151,7 @@ static enum Corruptions { TRUNCATE_TRAILER } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); conf.set(WALFactory.WAL_PROVIDER, "filesystem"); @@ -173,17 +169,17 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniDFSCluster(2); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniDFSCluster(); } - @Rule - public TestName name = new TestName(); + private String testMethodName; private WALFactory wals = null; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + testMethodName = testInfo.getTestMethod().get().getName(); LOG.info("Cleaning up cluster for new test."); fs = TEST_UTIL.getDFSCluster().getFileSystem(); HBASEDIR = TEST_UTIL.createRootDir(); @@ -196,13 +192,13 @@ public void setUp() throws Exception { REGIONS.clear(); Collections.addAll(REGIONS, "bbb", "ccc"); InstrumentedLogWriter.activateFailure = false; - wals = new WALFactory(conf, name.getMethodName()); - WALDIR = new Path(HBASELOGDIR, AbstractFSWALProvider.getWALDirectoryName(ServerName - .valueOf(name.getMethodName(), 16010, EnvironmentEdgeManager.currentTime()).toString())); + wals = new WALFactory(conf, 
testMethodName); + WALDIR = new Path(HBASELOGDIR, AbstractFSWALProvider.getWALDirectoryName( + ServerName.valueOf(testMethodName, 16010, EnvironmentEdgeManager.currentTime()).toString())); // fs.mkdirs(WALDIR); } - @After + @AfterEach public void tearDown() throws Exception { try { wals.close(); @@ -253,7 +249,7 @@ public Integer run() throws Exception { WALSplitter.split(HBASELOGDIR, WALDIR, OLDLOGDIR, fs, conf2, wals); LOG.info("Finished splitting out from under zombie."); Path[] logfiles = getLogForRegion(TABLE_NAME, region); - assertEquals("wrong number of split files for region", numWriters, logfiles.length); + assertEquals(numWriters, logfiles.length, "wrong number of split files for region"); int count = 0; for (Path logfile : logfiles) { count += countWAL(logfile); @@ -262,10 +258,9 @@ public Integer run() throws Exception { } }); LOG.info("zombie=" + counter.get() + ", robber=" + count); - assertTrue( + assertTrue(counter.get() == count || counter.get() + 1 == count, "The log file could have at most 1 extra log entry, but can't have less. 
" - + "Zombie could write " + counter.get() + " and logfile had only " + count, - counter.get() == count || counter.get() + 1 == count); + + "Zombie could write " + counter.get() + " and logfile had only " + count); } finally { stop.set(true); zombie.interrupt(); @@ -398,13 +393,13 @@ public void testTwoWorkerSplittingSameWAL() throws IOException, InterruptedExcep for (String region : REGIONS) { Path[] logfiles = getLogForRegion(TABLE_NAME, region); - assertEquals("wrong number of split files for region", numWriter, logfiles.length); + assertEquals(numWriter, logfiles.length, "wrong number of split files for region"); int count = 0; for (Path lf : logfiles) { count += countWAL(lf); } - assertEquals("wrong number of edits for region " + region, entries, count); + assertEquals(entries, count, "wrong number of edits for region " + region); } } @@ -426,7 +421,7 @@ public void run() { try { boolean ret = WALSplitter.splitLogFile(HBASEDIR, logfile, fs, conf, null, this, null, wals, rsServices); - assertTrue("Both splitting should pass", ret); + assertTrue(ret, "Both splitting should pass"); } catch (IOException e) { LOG.warn(getName() + " Worker exiting " + e); } @@ -502,7 +497,7 @@ public void testSplitPreservesEdits() throws IOException { Path[] splitLog = getLogForRegion(TABLE_NAME, REGION); assertEquals(1, splitLog.length); - assertTrue("edits differ after split", logsAreEqual(originalLog, splitLog[0])); + assertTrue(logsAreEqual(originalLog, splitLog[0]), "edits differ after split"); } @Test @@ -518,7 +513,7 @@ public void testSplitRemovesRegionEventsEdits() throws IOException { Path[] splitLog = getLogForRegion(TABLE_NAME, REGION); assertEquals(1, splitLog.length); - assertFalse("edits differ after split", logsAreEqual(originalLog, splitLog[0])); + assertFalse(logsAreEqual(originalLog, splitLog[0]), "edits differ after split"); // split log should only have the test edits assertEquals(10, countWAL(splitLog[0])); } @@ -549,7 +544,7 @@ public void 
testSplitLeavesCompactionEventsEdits() throws IOException { Path[] splitLog = getLogForRegion(TABLE_NAME, hri.getEncodedName()); assertEquals(1, splitLog.length); - assertFalse("edits differ after split", logsAreEqual(originalLog, splitLog[0])); + assertFalse(logsAreEqual(originalLog, splitLog[0]), "edits differ after split"); // split log should have 10 test edits plus 1 compaction marker assertEquals(11, countWAL(splitLog[0])); } @@ -666,8 +661,8 @@ public void testMiddleGarbageCorruptionSkipErrorsReadsHalfOfFile() throws IOExce int goodEntries = (NUM_WRITERS - 1) * ENTRIES; int firstHalfEntries = (int) Math.ceil(ENTRIES / 2) - 1; int allRegionsCount = splitAndCount(NUM_WRITERS, -1); - assertTrue("The file up to the corrupted area hasn't been parsed", - REGIONS.size() * (goodEntries + firstHalfEntries) <= allRegionsCount); + assertTrue(REGIONS.size() * (goodEntries + firstHalfEntries) <= allRegionsCount, + "The file up to the corrupted area hasn't been parsed"); } @Test @@ -686,8 +681,8 @@ public void testCorruptedFileGetsArchivedIfSkipErrors() throws IOException { archivedLogs.add(log.getPath().getName()); } LOG.debug(archived.toString()); - assertEquals(failureType.name() + ": expected to find all of our wals corrupt.", archivedLogs, - walDirContents); + assertEquals(archivedLogs, walDirContents, + failureType.name() + ": expected to find all of our wals corrupt."); } } @@ -720,7 +715,7 @@ private Set splitCorruptWALs(final FaultyProtobufWALStreamReader.Failure LOG.debug("no previous CORRUPTDIR to clean."); } // change to the faulty reader - wals = new WALFactory(conf, name.getMethodName()); + wals = new WALFactory(conf, testMethodName); generateWALs(-1); // Our reader will render all of these files corrupt. 
final Set walDirContents = new HashSet<>(); @@ -739,10 +734,11 @@ private Set splitCorruptWALs(final FaultyProtobufWALStreamReader.Failure } } - @Test(expected = IOException.class) + @Test public void testTrailingGarbageCorruptionLogFileSkipErrorsFalseThrows() throws IOException { conf.setBoolean(WALSplitter.SPLIT_SKIP_ERRORS_KEY, false); - splitCorruptWALs(FaultyProtobufWALStreamReader.FailureType.BEGINNING); + assertThrows(IOException.class, + () -> splitCorruptWALs(FaultyProtobufWALStreamReader.FailureType.BEGINNING)); } @Test @@ -753,8 +749,8 @@ public void testCorruptedLogFilesSkipErrorsFalseDoesNotTouchLogs() throws IOExce } catch (IOException e) { LOG.debug("split with 'skip errors' set to 'false' correctly threw"); } - assertEquals("if skip.errors is false all files should remain in place", NUM_WRITERS, - fs.listStatus(WALDIR).length); + assertEquals(NUM_WRITERS, fs.listStatus(WALDIR).length, + "if skip.errors is false all files should remain in place"); } private void ignoreCorruption(final Corruptions corruption, final int entryCount, @@ -809,7 +805,7 @@ public void testLogsGetArchivedAfterSplit() throws IOException { useDifferentDFSClient(); WALSplitter.split(HBASELOGDIR, WALDIR, OLDLOGDIR, fs, conf, wals); FileStatus[] archivedLogs = fs.listStatus(OLDLOGDIR); - assertEquals("wrong number of files in the archive log", NUM_WRITERS, archivedLogs.length); + assertEquals(NUM_WRITERS, archivedLogs.length, "wrong number of files in the archive log"); } @Test @@ -834,7 +830,7 @@ public void testLogDirectoryShouldBeDeletedAfterSuccessfulSplit() throws IOExcep } } - @Test(expected = IOException.class) + @Test public void testSplitWillFailIfWritingToRegionFails() throws Exception { // leave 5th log open so we could append the "trap" Writer writer = generateWALs(4); @@ -851,11 +847,10 @@ public void testSplitWillFailIfWritingToRegionFails() throws Exception { try { InstrumentedLogWriter.activateFailure = true; - WALSplitter.split(HBASELOGDIR, WALDIR, OLDLOGDIR, fs, 
conf, wals); - } catch (IOException e) { + IOException e = assertThrows(IOException.class, + () -> WALSplitter.split(HBASELOGDIR, WALDIR, OLDLOGDIR, fs, conf, wals)); assertTrue(e.getMessage() .contains("This exception is instrumented and should only be thrown for testing")); - throw e; } finally { InstrumentedLogWriter.activateFailure = false; } @@ -883,7 +878,7 @@ public void testIOEOnOutputThread() throws Exception { generateWALs(-1); useDifferentDFSClient(); FileStatus[] logfiles = fs.listStatus(WALDIR); - assertTrue("There should be some log file", logfiles != null && logfiles.length > 0); + assertTrue(logfiles != null && logfiles.length > 0, "There should be some log file"); // wals with no entries (like the one we don't use in the factory) // won't cause a failure since nothing will ever be written. // pick the largest one since it's most likely to have entries. @@ -895,7 +890,7 @@ public void testIOEOnOutputThread() throws Exception { largestSize = logfiles[i].getLen(); } } - assertTrue("There should be some log greater than size 0.", 0 < largestSize); + assertTrue(0 < largestSize, "There should be some log greater than size 0."); // Set up a splitter that will throw an IOE on the output side WALSplitter logSplitter = new WALSplitter(wals, conf, HBASEDIR, fs, HBASEDIR, fs, null, null, null) { @@ -1028,7 +1023,7 @@ public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable { conf.setInt("hbase.splitlog.report.period", 1000); boolean ret = WALSplitter.splitLogFile(HBASEDIR, logfile, spiedFs, conf, localReporter, null, Mockito.mock(SplitLogWorkerCoordination.class), wals, null); - assertFalse("Log splitting should failed", ret); + assertFalse(ret, "Log splitting should failed"); assertTrue(count.get() > 0); } catch (IOException e) { fail("There shouldn't be any exception but: " + e.toString()); @@ -1152,7 +1147,7 @@ public Entry answer(InvocationOnMock invocation) throws Throwable { LOG.info("Got " + entry.getValue() + " output edits for 
region " + entry.getKey()); assertEquals((long) entry.getValue(), numFakeEdits / regions.size()); } - assertEquals("Should have as many outputs as regions", regions.size(), outputCounts.size()); + assertEquals(regions.size(), outputCounts.size(), "Should have as many outputs as regions"); } // Does leaving the writer open in testSplitDeletedRegion matter enough for two tests? @@ -1227,7 +1222,7 @@ public void testConcurrentSplitLogAndReplayRecoverEdit() throws IOException { wals.getWAL(null); FileStatus[] logfiles = fs.listStatus(WALDIR); - assertTrue("There should be some log file", logfiles != null && logfiles.length > 0); + assertTrue(logfiles != null && logfiles.length > 0, "There should be some log file"); WALSplitter logSplitter = new WALSplitter(wals, conf, HBASEDIR, fs, HBASEDIR, fs, null, null, null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java index 940248eb6f93..ad8a5a864133 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java @@ -17,21 +17,16 @@ */ package org.apache.hadoop.hbase.wal; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestWALSplitBoundedLogWriterCreation extends TestWALSplit { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestWALSplitBoundedLogWriterCreation.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TestWALSplit.setUpBeforeClass(); TEST_UTIL.getConfiguration().setBoolean(WALSplitter.SPLIT_WRITER_CREATION_BOUNDED, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitCompressed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitCompressed.java index 3522e9139d0e..e3973b84c6bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitCompressed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitCompressed.java @@ -17,22 +17,17 @@ */ package org.apache.hadoop.hbase.wal; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestWALSplitCompressed extends TestWALSplit { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALSplitCompressed.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TestWALSplit.setUpBeforeClass(); TEST_UTIL.getConfiguration().setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java index cf9a6f174d6e..e23a5aca45d8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java @@ -19,10 +19,10 @@ import static org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay.addRegionEdits; import static org.apache.hadoop.hbase.wal.WALSplitter.WAL_SPLIT_TO_HFILE; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.when; import java.io.IOException; @@ -40,7 +40,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -73,24 +72,20 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.Pair; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) 
+@Tag(MediumTests.TAG) public class TestWALSplitToHFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALSplitToHFile.class); private static final Logger LOG = LoggerFactory.getLogger(AbstractTestWALReplay.class); static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); @@ -109,10 +104,9 @@ public class TestWALSplitToHFile { private static final byte[] VALUE2 = Bytes.toBytes("value2"); private static final int countPerFamily = 10; - @Rule - public final TestName TEST_NAME = new TestName(); + private String testMethodName; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = UTIL.getConfiguration(); conf.setBoolean(WAL_SPLIT_TO_HFILE, true); @@ -122,30 +116,30 @@ public static void setUpBeforeClass() throws Exception { CommonFSUtils.setRootDir(conf, hbaseRootDir); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + testMethodName = testInfo.getTestMethod().get().getName(); this.conf = HBaseConfiguration.create(UTIL.getConfiguration()); this.conf.setBoolean(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS, false); this.fs = UTIL.getDFSCluster().getFileSystem(); this.rootDir = CommonFSUtils.getRootDir(this.conf); this.oldLogDir = new Path(this.rootDir, HConstants.HREGION_OLDLOGDIR_NAME); String serverName = ServerName - .valueOf(TEST_NAME.getMethodName() + "-manual", 16010, EnvironmentEdgeManager.currentTime()) - .toString(); + .valueOf(testMethodName + "-manual", 16010, EnvironmentEdgeManager.currentTime()).toString(); this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName); this.logDir = new Path(this.rootDir, logName); if (UTIL.getDFSCluster().getFileSystem().exists(this.rootDir)) { UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true); } - 
this.wals = new WALFactory(conf, TEST_NAME.getMethodName()); + this.wals = new WALFactory(conf, testMethodName); } - @After + @AfterEach public void tearDown() throws Exception { this.wals.close(); UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true); @@ -188,7 +182,7 @@ private WAL createWAL(FileSystem fs, Path hbaseRootDir, String logName) throws I } private Pair setupTableAndRegion() throws IOException { - final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + final TableName tableName = TableName.valueOf(testMethodName); final TableDescriptor td = createBasic3FamilyTD(tableName); final RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).build(); final Path tableDir = CommonFSUtils.getTableDir(this.rootDir, tableName); @@ -215,11 +209,10 @@ public void testDifferentRootDirAndWALRootDir() throws Exception { FileSystem walFs = CommonFSUtils.getWALFileSystem(this.conf); this.oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME); String serverName = ServerName - .valueOf(TEST_NAME.getMethodName() + "-manual", 16010, EnvironmentEdgeManager.currentTime()) - .toString(); + .valueOf(testMethodName + "-manual", 16010, EnvironmentEdgeManager.currentTime()).toString(); this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName); this.logDir = new Path(walRootDir, logName); - this.wals = new WALFactory(conf, TEST_NAME.getMethodName()); + this.wals = new WALFactory(conf, testMethodName); Pair pair = setupTableAndRegion(); TableDescriptor td = pair.getFirst(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitValueCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitValueCompression.java index 83628062d480..38ddf1c0d72c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitValueCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitValueCompression.java @@ -17,23 +17,18 @@ */ package 
org.apache.hadoop.hbase.wal; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.wal.CompressionContext; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestWALSplitValueCompression extends TestWALSplit { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALSplitValueCompression.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.getConfiguration().setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true); TEST_UTIL.getConfiguration().setBoolean(CompressionContext.ENABLE_WAL_VALUE_COMPRESSION, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitWithDeletedTableData.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitWithDeletedTableData.java index 2b3db6c42911..21ddf043956b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitWithDeletedTableData.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitWithDeletedTableData.java @@ -17,17 +17,16 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; @@ -42,26 +41,23 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestWALSplitWithDeletedTableData { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALSplitWithDeletedTableData.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setup() throws Exception { TEST_UTIL.startMiniCluster(2); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWrongMetaWALFileName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWrongMetaWALFileName.java index c57daac1206c..7a0f18bd7bbf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWrongMetaWALFileName.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWrongMetaWALFileName.java @@ -17,42 +17,37 @@ */ package org.apache.hadoop.hbase.wal; -import static 
org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Testcase for HBASE-21843. Used to confirm that we use the correct name for meta wal file when * using {@link RegionGroupingProvider}. */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestWrongMetaWALFileName { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWrongMetaWALFileName.class); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setUp() throws Exception { UTIL.getConfiguration().set(WALFactory.WAL_PROVIDER, WALFactory.Providers.multiwal.name()); UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionDueToDanglingByteBufferTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionDueToDanglingByteBufferTestBase.java index 4180046d6987..375ae46e86b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionDueToDanglingByteBufferTestBase.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionDueToDanglingByteBufferTestBase.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Arrays; import java.util.concurrent.CountDownLatch; @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionWithMultiPutDueToDanglingByteBufferTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionWithMultiPutDueToDanglingByteBufferTestBase.java index 6a7cee22b6f5..a921d2442c60 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionWithMultiPutDueToDanglingByteBufferTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALCorruptionWithMultiPutDueToDanglingByteBufferTestBase.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.wal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.List; @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory;