diff --git a/.gitignore b/.gitignore index 9ab7a972d7ac..7d2c2b0e8570 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,4 @@ tmp **/.flattened-pom.xml .vscode/ **/__pycache__ +.opencode diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java index 1df5b22a7029..ccdf9958ac12 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java @@ -53,6 +53,10 @@ public boolean supportsParameter(ParameterContext pc, ExtensionContext ec) // test with wrapper type, otherwise it will always return false return Primitives.wrap(expectedType).isAssignableFrom(value.getClass()); } + // non-primitive type can accept null value + if (value == null) { + return true; + } return expectedType.isAssignableFrom(value.getClass()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java index 809ca50bd5a2..894ee921596e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java @@ -17,24 +17,20 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(MiscTests.TAG) public class TestByteBufferOutputStream { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestByteBufferOutputStream.class); - @Test public void testByteBufferReuse() throws IOException { byte[] someBytes = Bytes.toBytes("some bytes"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java index 77aa00ef91f9..e14298114212 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.io.InputStream; @@ -33,20 +33,16 @@ import org.apache.hadoop.fs.HasEnhancedByteBufferAccess; import org.apache.hadoop.fs.ReadOption; import org.apache.hadoop.fs.StreamCapabilities; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.io.ByteBufferPool; -import org.junit.ClassRule; import 
org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(MiscTests.TAG) public class TestFSDataInputStreamWrapper { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFSDataInputStreamWrapper.class); - @Test public void testUnbuffer() throws Exception { InputStream pc = new ParentClass(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java index 61ebde208225..eb5f589be290 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.FileNotFoundException; import java.io.IOException; @@ -31,7 +32,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -40,21 +40,17 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; import org.apache.hadoop.ipc.RemoteException; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test that FileLink switches between alternate locations when the current location moves or gets * deleted. 
*/ -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestFileLink { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFileLink.class); - @Test public void testEquals() { Path p1 = new Path("/p1"); @@ -160,7 +156,7 @@ public Configuration getConf() { } } - @Test(expected = FileNotFoundException.class) + @Test public void testLinkReadWithMissingFile() throws Exception { HBaseTestingUtility testUtil = new HBaseTestingUtility(); FileSystem fs = new MyDistributedFileSystem(); @@ -173,7 +169,9 @@ public void testLinkReadWithMissingFile() throws Exception { files.add(archivedPath); FileLink link = new FileLink(files); - link.open(fs); + assertThrows(FileNotFoundException.class, () -> { + link.open(fs); + }); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java index ce96f248564c..a9591f22b338 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java @@ -17,79 +17,69 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.regex.Matcher; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test that FileLink switches between alternate locations when the current location moves or gets * deleted. 
*/ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileLink { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileLink.class); - - @Rule - public TestName name = new TestName(); - @Test - public void testValidLinkNames() { + public void testValidLinkNames(TestInfo testInfo) { String validLinkNames[] = { "foo=fefefe-0123456", "ns=foo=abababa-fefefefe" }; for (String name : validLinkNames) { - Assert.assertTrue("Failed validating:" + name, name.matches(HFileLink.LINK_NAME_REGEX)); + assertTrue(name.matches(HFileLink.LINK_NAME_REGEX), "Failed validating:" + name); } for (String name : validLinkNames) { - Assert.assertTrue("Failed validating:" + name, HFileLink.isHFileLink(name)); + assertTrue(HFileLink.isHFileLink(name), "Failed validating:" + name); } - String testName = name.getMethodName() + "=fefefe-0123456"; - Assert.assertEquals(TableName.valueOf(name.getMethodName()), + String testName = testInfo.getTestMethod().get().getName() + "=fefefe-0123456"; + assertEquals(TableName.valueOf(testInfo.getTestMethod().get().getName()), HFileLink.getReferencedTableName(testName)); - Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName)); - Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName)); - Assert.assertEquals(testName, - HFileLink.createHFileLinkName(TableName.valueOf(name.getMethodName()), "fefefe", "0123456")); + assertEquals("fefefe", HFileLink.getReferencedRegionName(testName)); + assertEquals("0123456", HFileLink.getReferencedHFileName(testName)); + assertEquals(testName, HFileLink.createHFileLinkName( + TableName.valueOf(testInfo.getTestMethod().get().getName()), "fefefe", "0123456")); - testName = "ns=" + name.getMethodName() + "=fefefe-0123456"; - Assert.assertEquals(TableName.valueOf("ns", name.getMethodName()), + testName = "ns=" + testInfo.getTestMethod().get().getName() + "=fefefe-0123456"; + assertEquals(TableName.valueOf("ns", testInfo.getTestMethod().get().getName()), HFileLink.getReferencedTableName(testName)); - Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName)); - Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName)); - Assert.assertEquals(testName, HFileLink - .createHFileLinkName(TableName.valueOf("ns", name.getMethodName()), "fefefe", "0123456")); + assertEquals("fefefe", HFileLink.getReferencedRegionName(testName)); + assertEquals("0123456", HFileLink.getReferencedHFileName(testName)); + assertEquals(testName, HFileLink.createHFileLinkName( + TableName.valueOf("ns", testInfo.getTestMethod().get().getName()), "fefefe", "0123456")); for (String name : validLinkNames) { Matcher m = HFileLink.LINK_NAME_PATTERN.matcher(name); assertTrue(m.matches()); - Assert.assertEquals(HFileLink.getReferencedTableName(name), + assertEquals(HFileLink.getReferencedTableName(name), TableName.valueOf(m.group(1), m.group(2))); - Assert.assertEquals(HFileLink.getReferencedRegionName(name), m.group(3)); - Assert.assertEquals(HFileLink.getReferencedHFileName(name), m.group(4)); + assertEquals(HFileLink.getReferencedRegionName(name), m.group(3)); + assertEquals(HFileLink.getReferencedHFileName(name), m.group(4)); } } @Test - public void testBackReference() { + public void testBackReference(TestInfo testInfo) { Path rootDir = new Path("/root"); Path archiveDir = new Path(rootDir, ".archive"); String storeFileName = "121212"; @@ -97,8 +87,8 @@ public void testBackReference() { String encodedRegion = 
"FEFE"; String cf = "cf1"; - TableName refTables[] = - { TableName.valueOf(name.getMethodName()), TableName.valueOf("ns", name.getMethodName()) }; + TableName refTables[] = { TableName.valueOf(testInfo.getTestMethod().get().getName()), + TableName.valueOf("ns", testInfo.getTestMethod().get().getName()) }; for (TableName refTable : refTables) { Path refTableDir = CommonFSUtils.getTableDir(archiveDir, refTable); @@ -108,28 +98,26 @@ public void testBackReference() { String refStoreFileName = refTable.getNameAsString().replace(TableName.NAMESPACE_DELIM, '=') + "=" + encodedRegion + "-" + storeFileName; - TableName tableNames[] = { TableName.valueOf(name.getMethodName() + "1"), - TableName.valueOf("ns", name.getMethodName() + "2"), - TableName.valueOf(name.getMethodName() + ":" + name.getMethodName()) }; + TableName tableNames[] = { TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"), + TableName.valueOf("ns", testInfo.getTestMethod().get().getName() + "2"), + TableName.valueOf(testInfo.getTestMethod().get().getName() + ":" + + testInfo.getTestMethod().get().getName()) }; for (TableName tableName : tableNames) { Path tableDir = CommonFSUtils.getTableDir(rootDir, tableName); Path regionDir = HRegion.getRegionDir(tableDir, encodedRegion); Path cfDir = new Path(regionDir, cf); - // Verify back reference creation assertEquals( encodedRegion + "." + tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='), HFileLink.createBackReferenceName(CommonFSUtils.getTableName(tableDir).getNameAsString(), encodedRegion)); - // verify parsing back reference Pair parsedRef = HFileLink.parseBackReferenceName(encodedRegion + "." + tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '=')); assertEquals(parsedRef.getFirst(), tableName); assertEquals(encodedRegion, parsedRef.getSecond()); - // verify resolving back reference Path storeFileDir = new Path(refLinkDir, encodedRegion + "." 
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '=')); Path linkPath = new Path(cfDir, refStoreFileName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java index a999a4ac879c..2dd3e9e750b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.file.Paths; @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -56,27 +55,27 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category({ IOTests.class, SmallTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHalfStoreFileReader { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHalfStoreFileReader.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHalfStoreFileReader.class); private static HBaseTestingUtility TEST_UTIL; - @BeforeClass + @BeforeAll public static void setupBeforeClass() throws Exception { TEST_UTIL = new HBaseTestingUtility(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.cleanupTestDir(); } @@ -100,7 +99,7 @@ public void testHalfScanAndReseek() throws IOException, InterruptedException { fs.mkdirs(parentPath); String tableName = Paths.get(root_dir).getFileName().toString(); RegionInfo splitAHri = RegionInfoBuilder.newBuilder(TableName.valueOf(tableName)).build(); - Thread.currentThread().sleep(1000); + Thread.sleep(1000); RegionInfo splitBHri = RegionInfoBuilder.newBuilder(TableName.valueOf(tableName)).build(); Path splitAPath = new Path(new Path(root_dir, splitAHri.getRegionNameAsString()), "CF"); Path splitBPath = new Path(new Path(root_dir, splitBHri.getRegionNameAsString()), "CF"); @@ -165,21 +164,19 @@ private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, Cach (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf); storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader()); halfreader.loadFileInfo(); - final HFileScanner scanner = halfreader.getScanner(false, false); - - scanner.seekTo(); - Cell curr; - do { - curr = scanner.getCell(); - KeyValue reseekKv = getLastOnCol(curr); - 
int ret = scanner.reseekTo(reseekKv); - assertTrue("reseek to returned: " + ret, ret > 0); - // System.out.println(curr + ": " + ret); - } while (scanner.next()); - - int ret = scanner.reseekTo(getLastOnCol(curr)); - // System.out.println("Last reseek: " + ret); - assertTrue(ret > 0); + try (HFileScanner scanner = halfreader.getScanner(false, false)) { + scanner.seekTo(); + Cell curr; + do { + curr = scanner.getCell(); + KeyValue reseekKv = getLastOnCol(curr); + int ret = scanner.reseekTo(reseekKv); + assertTrue(ret > 0, "reseek to returned: " + ret); + } while (scanner.next()); + + int ret = scanner.reseekTo(getLastOnCol(curr)); + assertTrue(ret > 0); + } halfreader.close(true); } @@ -218,8 +215,8 @@ public void testHalfScanner() throws IOException { } beforeMidKey = item; } - System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey)); - System.out.println("beforeMidKey: " + beforeMidKey); + LOG.info("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey)); + LOG.info("beforeMidKey: " + beforeMidKey); // Seek on the splitKey, should be in top, not in bottom Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java index 38b093b997e3..9886e009f6ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java @@ -19,8 +19,8 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.lessThan; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -42,7 +42,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; @@ -74,29 +73,25 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.ClassSize; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Testing the sizing that HeapSize offers and compares to the size given by ClassSize. */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHeapSize { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHeapSize.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHeapSize.class); // List of classes implementing HeapSize // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry, HStoreKey // KeyValue, LruBlockCache, Put, WALKey - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { // Print detail on jvm so we know what is different should below test fail. 
RuntimeMXBean b = ManagementFactory.getRuntimeMXBean(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java index 51b9b3b7618d..2a82c77a3335 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestImmutableBytesWritable { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestImmutableBytesWritable.class); - @Test public void testHash() throws Exception { assertEquals(new ImmutableBytesWritable(Bytes.toBytes("xxabc"), 2, 3).hashCode(), @@ -54,7 +49,7 @@ public void testSpecificCompare() { ImmutableBytesWritable ibw1 = new ImmutableBytesWritable(new byte[] { 0x0f }); ImmutableBytesWritable ibw2 = new ImmutableBytesWritable(new byte[] { 0x00, 0x00 }); ImmutableBytesWritable.Comparator c = new ImmutableBytesWritable.Comparator(); - assertFalse("ibw1 < ibw2", c.compare(ibw1, ibw2) < 0); + assertFalse(c.compare(ibw1, ibw2) < 0, "ibw1 < ibw2"); } @Test @@ -103,23 +98,22 @@ private void doComparisonsOnRaw(ImmutableBytesWritable a, ImmutableBytesWritable a.write(new DataOutputStream(baosA)); b.write(new DataOutputStream(baosB)); - assertEquals("Comparing " + a + " and " + b + " as raw", signum(comparator - .compare(baosA.toByteArray(), 0, baosA.size(), baosB.toByteArray(), 0, baosB.size())), - expectedSignum); + assertEquals(signum(comparator.compare(baosA.toByteArray(), 0, baosA.size(), + baosB.toByteArray(), 0, baosB.size())), expectedSignum, + "Comparing " + a + " and " + b + " as raw"); - assertEquals( - "Comparing " + a + " and " + b + " as raw (inverse)", -signum(comparator - .compare(baosB.toByteArray(), 0, baosB.size(), baosA.toByteArray(), 0, baosA.size())), - expectedSignum); + assertEquals(-signum(comparator.compare(baosB.toByteArray(), 0, baosB.size(), + baosA.toByteArray(), 0, baosA.size())), expectedSignum, + "Comparing " + a + " and " + b + " as raw (inverse)"); } private void doComparisonsOnObjects(ImmutableBytesWritable a, ImmutableBytesWritable b, int expectedSignum) { ImmutableBytesWritable.Comparator comparator = new ImmutableBytesWritable.Comparator(); - assertEquals("Comparing " + a + " and " + b + " as objects", signum(comparator.compare(a, b)), - expectedSignum); - assertEquals("Comparing " + a + " and " + b + " as objects (inverse)", - -signum(comparator.compare(b, a)), expectedSignum); + 
assertEquals(signum(comparator.compare(a, b)), expectedSignum, + "Comparing " + a + " and " + b + " as objects"); + assertEquals(-signum(comparator.compare(b, a)), expectedSignum, + "Comparing " + a + " and " + b + " as objects (inverse)"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java index 7e9217d09c0a..7f3ae8ff2b41 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java @@ -18,20 +18,16 @@ package org.apache.hadoop.hbase.io; import org.apache.hadoop.hbase.CompatibilityFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.test.MetricsAssertHelper; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(MiscTests.TAG) public class TestMetricsIO { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsIO.class); - public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java index 76a411d390bc..39a63def8f46 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io.compress; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; import java.util.Random; @@ -86,11 +86,11 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress HFile.Reader reader = HFile.createReader(FS, path, cacheConf, true, conf); try { scanner = reader.getScanner(conf, false, false); - assertTrue("Initial seekTo failed", scanner.seekTo()); + assertTrue(scanner.seekTo(), "Initial seekTo failed"); do { Cell kv = scanner.getCell(); - assertTrue("Read back an unexpected or invalid KV", - testKvs.contains(KeyValueUtil.ensureKeyValue(kv))); + assertTrue(testKvs.contains(KeyValueUtil.ensureKeyValue(kv)), + "Read back an unexpected or invalid KV"); i++; } while (scanner.next()); } finally { @@ -98,7 +98,7 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress scanner.close(); } - assertEquals("Did not read back as many KVs as written", i, testKvs.size()); + assertEquals(i, testKvs.size(), "Did not read back as many KVs as written"); // Test random seeks with pread Random rand = ThreadLocalRandom.current(); @@ -106,10 +106,10 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress reader = HFile.createReader(FS, path, cacheConf, true, conf); try { scanner = reader.getScanner(conf, false, true); - assertTrue("Initial seekTo failed", scanner.seekTo()); + assertTrue(scanner.seekTo(), "Initial seekTo failed"); for (i = 0; i < 100; i++) { KeyValue kv = 
testKvs.get(rand.nextInt(testKvs.size())); - assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv)); + assertEquals(0, scanner.seekTo(kv), "Unable to find KV as expected: " + kv); } } finally { scanner.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java index 68ce5e359f56..698a0664cf80 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; @@ -37,17 +36,13 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestBufferedDataBlockEncoder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBufferedDataBlockEncoder.class); - byte[] row1 = Bytes.toBytes("row1"); byte[] row2 = Bytes.toBytes("row2"); byte[] row_1_0 = Bytes.toBytes("row10"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java index ab837f4d5cae..6e28d478b287 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -49,24 +48,20 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import 
org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Tests changing data block encoding settings of a column family. */ -@Category({ IOTests.class, LargeTests.class }) +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) public class TestChangingEncoding { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestChangingEncoding.class); - private static final Logger LOG = LoggerFactory.getLogger(TestChangingEncoding.class); static final String CF = "EncodingTestCF"; static final byte[] CF_BYTES = Bytes.toBytes(CF); @@ -104,7 +99,7 @@ private void prepareTest(String testId) throws IOException { numBatchesWritten = 0; } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // Use a small flush size to create more HFiles. conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024); @@ -117,7 +112,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java index a502113325fa..dc0dc5be7721 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayInputStream; import java.io.DataInputStream; @@ -27,18 +27,18 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -55,13 +55,8 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.RedundantKVGenerator; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -69,14 +64,12 @@ * Test all of the data block encoding algorithms for correctness. 
Most of the class generate data * which will test different branches in code. */ -@Category({ IOTests.class, LargeTests.class }) -@RunWith(Parameterized.class) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate( + name = "{index}: includesMemstoreTS={0}, includesTags={1}, useOffheapData={2}") public class TestDataBlockEncoders { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDataBlockEncoders.class); - private static final Logger LOG = LoggerFactory.getLogger(TestDataBlockEncoders.class); private static int NUMBER_OF_KV = 10000; @@ -92,9 +85,8 @@ public class TestDataBlockEncoders { private final boolean includesTags; private final boolean useOffheapData; - @Parameters - public static Collection<Object[]> parameters() { - return HBaseTestingUtility.memStoreTSTagsAndOffheapCombination(); + public static Stream<Arguments> parameters() { + return HBaseTestingUtility.memStoreTSTagsAndOffheapCombination().stream().map(Arguments::of); } public TestDataBlockEncoders(boolean includesMemstoreTS, boolean includesTag, @@ -120,7 +112,7 @@ private HFileBlockEncodingContext getEncodingContext(Configuration conf, /** * Test data block encoding of empty KeyValue. On test failure. */ - @Test + @TestTemplate public void testEmptyKeyValues() throws IOException { List<KeyValue> kvList = new ArrayList<>(); byte[] row = new byte[0]; @@ -144,7 +136,7 @@ public void testEmptyKeyValues() throws IOException { /** * Test KeyValues with negative timestamp. On test failure. */ - @Test + @TestTemplate public void testNegativeTimestamps() throws IOException { List<KeyValue> kvList = new ArrayList<>(); byte[] row = new byte[0]; @@ -169,7 +161,7 @@ public void testNegativeTimestamps() throws IOException { * Test whether compression -> decompression gives the consistent results on pseudorandom sample. * @throws IOException On test failure. */ - @Test + @TestTemplate public void testExecutionOnSample() throws IOException { List<KeyValue> kvList = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags); testEncodersOnDataset(kvList, includesMemstoreTS, includesTags); @@ -178,7 +170,7 @@ public void testExecutionOnSample() throws IOException { /** * Test seeking while file is encoded. */ - @Test + @TestTemplate public void testSeekingOnSample() throws IOException { List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags); @@ -231,7 +223,7 @@ public void testSeekingOnSample() throws IOException { LOG.info("Done"); } - @Test + @TestTemplate public void testSeekingToOffHeapKeyValueInSample() throws IOException { List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags); @@ -306,7 +298,7 @@ static ByteBuffer encodeKeyValues(DataBlockEncoding encoding, List<KeyValue> kvs return ByteBuffer.wrap(encodedData); } - @Test + @TestTemplate public void testNextOnSample() throws IOException { List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags); @@ -348,7 +340,7 @@ public void testNextOnSample() throws IOException { /** * Test whether the decompression of first key is implemented correctly.
*/ - @Test + @TestTemplate public void testFirstKeyInBlockOnSample() throws IOException { List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags); @@ -368,7 +360,7 @@ public void testFirstKeyInBlockOnSample() throws IOException { } } - @Test + @TestTemplate public void testRowIndexWithTagsButNoTagsInCell() throws IOException { List<KeyValue> kvList = new ArrayList<>(); byte[] row = new byte[0]; @@ -388,7 +380,7 @@ public void testRowIndexWithTagsButNoTagsInCell() throws IOException { encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta)); seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer)); Cell cell = seeker.getCell(); - Assert.assertEquals(expectedKV.getLength(), ((KeyValue) cell).getLength()); + assertEquals(expectedKV.getLength(), ((KeyValue) cell).getLength()); } private void checkSeekingConsistency(List<DataBlockEncoder.EncodedSeeker> encodedSeekers, @@ -452,7 +444,7 @@ private void testEncodersOnDataset(List<KeyValue> kvList, boolean includesMemsto } } - @Test + @TestTemplate public void testZeroByte() throws IOException { List<KeyValue> kvList = new ArrayList<>(); byte[] row = Bytes.toBytes("abcd"); @@ -489,8 +481,8 @@ private void testAlgorithm(byte[] encodedData, ByteBuffer unencodedDataBuf, // this is because in case of prefix tree the decoded stream will not have // the // mvcc in it. - assertEquals("Encoding -> decoding gives different results for " + encoder, - Bytes.toStringBinary(unencodedDataBuf), Bytes.toStringBinary(actualDataset)); + assertEquals(Bytes.toStringBinary(unencodedDataBuf), Bytes.toStringBinary(actualDataset), + "Encoding -> decoding gives different results for " + encoder); } private static ByteBufferKeyValue buildOffHeapKeyValue(KeyValue keyValue) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoding.java index 233ba4405226..384d5c44f1d1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoding.java @@ -17,22 +17,18 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(MiscTests.TAG) public class TestDataBlockEncoding { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDataBlockEncoding.class); - @Test public void testGetDataBlockEncoder() throws Exception { for (DataBlockEncoding algo : DataBlockEncoding.values()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java index 7898ac2da5d1..aa4a80ece3f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java +++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -39,19 +38,15 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestSeekBeforeWithReverseScan { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSeekBeforeWithReverseScan.class); - private final HBaseTestingUtility testUtil = new HBaseTestingUtility(); private HRegion region; @@ -59,7 +54,7 @@ public class TestSeekBeforeWithReverseScan { private byte[] cfName = Bytes.toBytes("a"); private byte[] cqName = Bytes.toBytes("b"); - @Before + @BeforeEach public void setUp() throws Exception { TableName tableName = TableName.valueOf(getClass().getSimpleName()); HTableDescriptor htd = new HTableDescriptor(tableName); @@ -69,7 +64,7 @@ public void setUp() throws Exception { region = HBaseTestingUtility.createRegionAndWAL(info, path, testUtil.getConfiguration(), htd); } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtility.closeRegionAndWAL(region); testUtil.cleanupTestDir(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java index 43f4605a0fc1..2ba6ce32a64f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java @@ -17,18 +17,17 @@ */ package org.apache.hadoop.hbase.io.encoding; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Collection; import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -39,28 +38,21 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import 
org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; -@Category({ IOTests.class, SmallTests.class }) -@RunWith(Parameterized.class) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "dataEncoding={0}") public class TestSeekToBlockWithEncoders { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSeekToBlockWithEncoders.class); - static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HConstants.HFILEBLOCK_HEADER_SIZE]; private final boolean useOffheapData; private final Configuration conf = HBaseConfiguration.create(); - @Parameters - public static Collection<Object[]> parameters() { - return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED; + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of(true), Arguments.of(false)); } public TestSeekToBlockWithEncoders(boolean useOffheapData) { @@ -70,7 +62,7 @@ public TestSeekToBlockWithEncoders(boolean useOffheapData) { /** * Test seeking while file is encoded. */ - @Test + @TestTemplate public void testSeekToBlockWithNonMatchingSeekKey() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -96,7 +88,7 @@ public void testSeekToBlockWithNonMatchingSeekKey() throws IOException { /** * Test seeking while file is encoded. */ - @Test + @TestTemplate public void testSeekingToBlockWithBiggerNonLength1() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -122,7 +114,7 @@ public void testSeekingToBlockWithBiggerNonLength1() throws IOException { /** * Test seeking while file is encoded. */ - @Test + @TestTemplate public void testSeekingToBlockToANotAvailableKey() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -148,7 +140,7 @@ public void testSeekingToBlockToANotAvailableKey() throws IOException { /** * Test seeking while file is encoded.
*/ - @Test + @TestTemplate public void testSeekToBlockWithDecreasingCommonPrefix() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("row10aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -167,7 +159,7 @@ public void testSeekToBlockWithDecreasingCommonPrefix() throws IOException { seekToTheKey(kv3, sampleKv, toSeek); } - @Test + @TestTemplate public void testSeekToBlockWithDiffQualifer() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -187,7 +179,7 @@ public void testSeekToBlockWithDiffQualifer() throws IOException { seekToTheKey(kv5, sampleKv, toSeek); } - @Test + @TestTemplate public void testSeekToBlockWithDiffQualiferOnSameRow() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -210,7 +202,7 @@ public void testSeekToBlockWithDiffQualiferOnSameRow() throws IOException { seekToTheKey(kv6, sampleKv, toSeek); } - @Test + @TestTemplate public void testSeekToBlockWithDiffQualiferOnSameRow1() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), @@ -233,7 +225,7 @@ public void testSeekToBlockWithDiffQualiferOnSameRow1() throws IOException { seekToTheKey(kv5, sampleKv, toSeek); } - @Test + @TestTemplate public void testSeekToBlockWithDiffQualiferOnSameRowButDescendingInSize() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("qual1"), @@ -256,7 +248,7 @@ public void testSeekToBlockWithDiffQualiferOnSameRowButDescendingInSize() throws seekToTheKey(kv6, sampleKv, toSeek); } - @Test + @TestTemplate public void testSeekToBlockWithDiffFamilyAndQualifer() throws IOException { List<KeyValue> sampleKv = new ArrayList<>(); KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("fam1"), Bytes.toBytes("q1"), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java index 116d6d7bcefe..d176103a2e86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java @@ -40,8 +40,6 @@ public KeySampler(Random random, byte[] first, byte[] last, DiscreteRNG keyLenRN int lastLen = keyPrefixToInt(last); min = Math.min(firstLen, lastLen); max = Math.max(firstLen, lastLen); - System.out.println(min); - System.out.println(max); this.keyLenRNG = keyLenRNG; } @@ -54,13 +52,13 @@ private int keyPrefixToInt(byte[] key) { public void next(BytesWritable key) { key.setSize(Math.max(MIN_KEY_LEN, keyLenRNG.nextInt())); - random.nextBytes(key.get()); + random.nextBytes(key.getBytes()); int rnd = 0; if (max != min) { rnd = random.nextInt(max - min); } int n = rnd + min; - byte[] b = key.get(); + byte[] b = key.getBytes(); b[0] = (byte) (n >> 24); b[1] = (byte) (n >> 16); b[2] = (byte) (n >> 8); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java index cda6c3c13723..0326c64d065b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java +++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java @@ -17,39 +17,34 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Map; import java.util.NavigableSet; import java.util.Objects; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.DataCacheEntry; import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.IndexCacheEntry; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestBlockCacheReporting { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBlockCacheReporting.class); - private static final Logger LOG = LoggerFactory.getLogger(TestBlockCacheReporting.class); private Configuration conf; - @Before + @BeforeEach public void setUp() throws Exception { this.conf = HBaseConfiguration.create(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockEvictionOnRegionMovement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockEvictionOnRegionMovement.java index 53c47fea0a3a..979a5f9c641d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockEvictionOnRegionMovement.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockEvictionOnRegionMovement.java @@ -19,14 +19,13 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY; import static org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.StartMiniClusterOption; @@ -45,21 +44,17 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
-@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestBlockEvictionOnRegionMovement { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBlockEvictionOnRegionMovement.class); - private static final Logger LOG = LoggerFactory.getLogger(TestBlockEvictionOnRegionMovement.class); @@ -71,7 +66,7 @@ public class TestBlockEvictionOnRegionMovement { MiniHBaseCluster cluster; StartMiniClusterOption option = StartMiniClusterOption.builder().numRegionServers(2).build(); - @Before + @BeforeEach public void setup() throws Exception { conf = TEST_UTIL.getConfiguration(); testDir = TEST_UTIL.getDataTestDir(); @@ -175,7 +170,7 @@ public TableName writeDataToTable(String testName) throws IOException, Interrupt return tableName; } - @After + @AfterEach public void tearDown() throws Exception { try { TEST_UTIL.shutdownMiniCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockIOUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockIOUtils.java index cf61d574e389..90758138e181 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockIOUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockIOUtils.java @@ -24,14 +24,15 @@ import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasName; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -39,9 +40,8 @@ import static org.mockito.Mockito.when; import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.StatusCode; import io.opentelemetry.context.Scope; -import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule; +import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; import io.opentelemetry.sdk.trace.data.SpanData; import java.io.DataOutputStream; import java.io.IOException; @@ -54,7 +54,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MatcherPredicate; @@ -73,34 +72,31 @@ import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; 
-import org.junit.rules.TestName; - -@Category({ IOTests.class, SmallTests.class }) +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.extension.RegisterExtension; + +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestBlockIOUtils { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBlockIOUtils.class); + private String methodName; - @Rule - public TestName testName = new TestName(); - - @Rule - public ExpectedException exception = ExpectedException.none(); - - @Rule - public OpenTelemetryRule otelRule = OpenTelemetryRule.create(); + @RegisterExtension + static OpenTelemetryExtension OTEL_EXT = OpenTelemetryExtension.create(); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int NUM_TEST_BLOCKS = 2; private static final Compression.Algorithm COMPRESSION_ALGO = Compression.Algorithm.GZ; + @BeforeEach + public void setUp(TestInfo testInfo) { + methodName = testInfo.getTestMethod().get().getName(); + } + @Test public void testIsByteBufferReadable() throws IOException { FileSystem fs = TEST_UTIL.getTestFileSystem(); @@ -130,12 +126,12 @@ public void testReadFully() throws IOException { byte[] heapBuf = new byte[s.length()]; buf.get(heapBuf, 0, heapBuf.length); assertArrayEquals(Bytes.toBytes(s), heapBuf); - }, testName.getMethodName()); + }, methodName); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.readFully"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", 11)))))))); } @@ -155,7 +151,7 @@ private void testPreadReadFullBytesInternal(boolean readAllBytes, long randomSee Configuration conf = TEST_UTIL.getConfiguration(); conf.setBoolean(HConstants.HFILE_PREAD_ALL_BYTES_ENABLED_KEY, readAllBytes); FileSystem fs = TEST_UTIL.getTestFileSystem(); - Path path = new Path(TEST_UTIL.getDataTestDirOnTestFS(), testName.getMethodName()); + Path path = new Path(TEST_UTIL.getDataTestDirOnTestFS(), methodName); // give a fixed seed such that we can see failures easily.
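Two JUnit 4 rules disappear in this hunk. The TestName rule is replaced by injecting TestInfo into a @BeforeEach method, and OpenTelemetryRule becomes the JUnit 5 OpenTelemetryExtension wired up with @RegisterExtension; note that Jupiter rejects private @RegisterExtension fields, which is why the field above is package-private. Condensed (ExampleTracingTest is a hypothetical name):

import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.RegisterExtension;

public class ExampleTracingTest {
  // Must not be private, per the @RegisterExtension contract.
  @RegisterExtension
  static OpenTelemetryExtension otelExt = OpenTelemetryExtension.create();

  private String methodName;

  @BeforeEach
  void setUp(TestInfo testInfo) {
    // Replaces JUnit 4's testName.getMethodName().
    methodName = testInfo.getTestMethod().get().getName();
  }
}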
Random rand = new Random(randomSeed); long totalDataBlockBytes = @@ -246,7 +242,7 @@ public void testReadWithExtra() throws IOException { out.writeBytes(s); } - Span span = TraceUtil.createSpan(testName.getMethodName()); + Span span = TraceUtil.createSpan(methodName); try (Scope ignored = span.makeCurrent()) { ByteBuff buf = new SingleByteBuff(ByteBuffer.allocate(8)); try (FSDataInputStream in = fs.open(p)) { @@ -260,14 +256,14 @@ public void testReadWithExtra() throws IOException { span.end(); } TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.readWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", 8L)))))))); - otelRule.clearSpans(); - span = TraceUtil.createSpan(testName.getMethodName()); + OTEL_EXT.clearSpans(); + span = TraceUtil.createSpan(methodName); try (Scope ignored = span.makeCurrent()) { ByteBuff buf = new MultiByteBuff(ByteBuffer.allocate(4), ByteBuffer.allocate(4), ByteBuffer.allocate(4)); @@ -282,30 +278,29 @@ public void testReadWithExtra() throws IOException { span.end(); } TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.readWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", 11L)))))))); - otelRule.clearSpans(); - span = TraceUtil.createSpan(testName.getMethodName()); + OTEL_EXT.clearSpans(); + span = TraceUtil.createSpan(methodName); try (Scope ignored = span.makeCurrent()) { ByteBuff buf = new MultiByteBuff(ByteBuffer.allocate(4), ByteBuffer.allocate(4), ByteBuffer.allocate(4)); buf.position(0).limit(12); - exception.expect(IOException.class); try (FSDataInputStream in = fs.open(p)) { - BlockIOUtils.readWithExtra(buf, in, 12, 0); - fail("Should only read 11 bytes"); + assertThrows(IOException.class, () -> BlockIOUtils.readWithExtra(buf, in, 12, 0), + "Should only read 11 bytes"); } } finally { span.end(); } TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.readWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", 11L)))))))); } @@ -322,18 +317,17 @@ public void testPositionalReadNoExtra() throws IOException { FSDataInputStream in = mock(FSDataInputStream.class); when(in.read(position, buf, bufOffset, totalLen)).thenReturn(totalLen); when(in.hasCapability(anyString())).thenReturn(false); - boolean ret = - TraceUtil.trace(() -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), - testName.getMethodName());
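The last read in testReadWithExtra shows the standard replacement for both ExpectedException and the fail(...) guard: assertThrows takes the expected exception type, an executable, and an optional failure message, and returns the thrown exception. A self-contained sketch (class name and values hypothetical):

import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import org.junit.jupiter.api.Test;

public class AssertThrowsExample {
  @Test
  public void throwsOnShortRead() {
    // One call replaces exception.expect(IOException.class) and the
    // fail("...") that had to follow the statement expected to throw.
    IOException e = assertThrows(IOException.class, () -> {
      throw new IOException("premature EOF from inputStream");
    }, "Should only read 11 bytes");
    // The exception is returned, so its message can still be inspected.
    assertTrue(e.getMessage().contains("EOF"));
  }
}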
assertFalse("Expect false return when no extra bytes requested", ret); + boolean ret = TraceUtil.trace( + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), methodName); + assertFalse(ret, "Expect false return when no extra bytes requested"); verify(in).read(position, buf, bufOffset, totalLen); verify(in).hasCapability(anyString()); verifyNoMoreInteractions(in); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", totalLen)))))))); } @@ -351,19 +345,18 @@ public void testPositionalReadShortReadOfNecessaryBytes() throws IOException { when(in.read(position, buf, bufOffset, totalLen)).thenReturn(5); when(in.read(5, buf, 5, 5)).thenReturn(5); when(in.hasCapability(anyString())).thenReturn(false); - boolean ret = - TraceUtil.trace(() -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), - testName.getMethodName()); - assertFalse("Expect false return when no extra bytes requested", ret); + boolean ret = TraceUtil.trace( + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), methodName); + assertFalse(ret, "Expect false return when no extra bytes requested"); verify(in).read(position, buf, bufOffset, totalLen); verify(in).read(5, buf, 5, 5); verify(in).hasCapability(anyString()); verifyNoMoreInteractions(in); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", totalLen)))))))); } @@ -380,18 +373,17 @@ public void testPositionalReadExtraSucceeded() throws IOException { FSDataInputStream in = mock(FSDataInputStream.class); when(in.read(position, buf, bufOffset, totalLen)).thenReturn(totalLen); when(in.hasCapability(anyString())).thenReturn(false); - boolean ret = - TraceUtil.trace(() -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), - testName.getMethodName()); - assertTrue("Expect true return when reading extra bytes succeeds", ret); + boolean ret = TraceUtil.trace( + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), methodName); + assertTrue(ret, "Expect true return when reading extra bytes succeeds"); verify(in).read(position, buf, bufOffset, totalLen); verify(in).hasCapability(anyString()); verifyNoMoreInteractions(in); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), 
+ assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", totalLen)))))))); } @@ -408,18 +400,17 @@ public void testPositionalReadExtraFailed() throws IOException { FSDataInputStream in = mock(FSDataInputStream.class); when(in.read(position, buf, bufOffset, totalLen)).thenReturn(necessaryLen); when(in.hasCapability(anyString())).thenReturn(false); - boolean ret = - TraceUtil.trace(() -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), - testName.getMethodName()); - assertFalse("Expect false return when reading extra bytes fails", ret); + boolean ret = TraceUtil.trace( + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), methodName); + assertFalse(ret, "Expect false return when reading extra bytes fails"); verify(in).read(position, buf, bufOffset, totalLen); verify(in).hasCapability(anyString()); verifyNoMoreInteractions(in); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", necessaryLen)))))))); } @@ -437,19 +428,18 @@ public void testPositionalReadShortReadCompletesNecessaryAndExtraBytes() throws when(in.read(position, buf, bufOffset, totalLen)).thenReturn(5); when(in.read(5, buf, 5, 10)).thenReturn(10); when(in.hasCapability(anyString())).thenReturn(false); - boolean ret = - TraceUtil.trace(() -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), - testName.getMethodName()); - assertTrue("Expect true return when reading extra bytes succeeds", ret); + boolean ret = TraceUtil.trace( + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen), methodName); + assertTrue(ret, "Expect true return when reading extra bytes succeeds"); verify(in).read(position, buf, bufOffset, totalLen); verify(in).read(5, buf, 5, 10); verify(in).hasCapability(anyString()); verifyNoMoreInteractions(in); TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), hasAttributes(containsEntry("db.hbase.io.heap_bytes_read", totalLen)))))))); } @@ -464,28 +454,27 @@ public void testPositionalReadPrematureEOF() throws IOException { byte[] buf = new byte[totalLen]; ByteBuff bb = new SingleByteBuff(ByteBuffer.wrap(buf, 0, totalLen)); FSDataInputStream in = mock(FSDataInputStream.class); - when(in.read(position, buf, bufOffset, totalLen)).thenReturn(9); when(in.read(position, buf, bufOffset, totalLen)).thenReturn(-1); when(in.hasCapability(anyString())).thenReturn(false); - exception.expect(IOException.class); - exception.expectMessage("EOF"); - Span span = TraceUtil.createSpan(testName.getMethodName()); + Span span = TraceUtil.createSpan(methodName); try (Scope ignored = span.makeCurrent()) {
- BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen); - span.setStatus(StatusCode.OK); - } catch (IOException e) { + IOException e = assertThrows(IOException.class, + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen)); + assertThat(e.getMessage(), containsString("EOF")); TraceUtil.setError(span, e); - throw e; } finally { span.end(); - - TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( - otelRule::getSpans, hasItem(allOf(hasName(testName.getMethodName()), hasEnded())))); - assertThat(otelRule.getSpans(), - hasItems(allOf(hasName(testName.getMethodName()), - hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), - hasAttributes(AttributesMatchers.isEmpty()))))))); } + TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<Iterable<SpanData>>( + OTEL_EXT::getSpans, hasItem(allOf(hasName(methodName), hasEnded())))); + assertThat(OTEL_EXT.getSpans(), + hasItems(allOf(hasName(methodName), + hasEvents(hasItem(allOf(EventMatchers.hasName("BlockIOUtils.preadWithExtra"), + hasAttributes(AttributesMatchers.isEmpty()))))))); + + verify(in).read(position, buf, bufOffset, totalLen); + verify(in).hasCapability(anyString()); + verifyNoMoreInteractions(in); } /** @@ -518,8 +507,8 @@ public int read(long position, ByteBuffer buf) throws IOException { @Test public void testByteBufferPositionedReadable() throws IOException { - assumeTrue("Skip the test because ByteBufferPositionedReadable is not available", - isByteBufferPositionedReadable()); + assumeTrue(isByteBufferPositionedReadable(), + "Skip the test because ByteBufferPositionedReadable is not available"); long position = 0; int necessaryLen = 10; int extraLen = 1; @@ -534,7 +523,7 @@ public void testByteBufferPositionedReadable() throws IOException { when(in.read(firstReadLen, buf)).thenReturn(secondReadLen); when(in.hasCapability(anyString())).thenReturn(true); boolean ret = BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen); - assertTrue("Expect true return when reading extra bytes succeeds", ret); + assertTrue(ret, "Expect true return when reading extra bytes succeeds"); verify(in).read(position, buf); verify(in).read(firstReadLen, buf); verify(in).hasCapability(anyString()); @@ -543,8 +532,8 @@ public void testByteBufferPositionedReadableEOF() throws IOException { - assumeTrue("Skip the test because ByteBufferPositionedReadable is not available", - isByteBufferPositionedReadable()); + assumeTrue(isByteBufferPositionedReadable(), + "Skip the test because ByteBufferPositionedReadable is not available"); long position = 0; int necessaryLen = 10; int extraLen = 0; @@ -555,11 +544,11 @@ MyFSDataInputStream in = mock(MyFSDataInputStream.class); when(in.read(position, buf)).thenReturn(firstReadLen); - when(in.read(position, buf)).thenReturn(-1); + when(in.read(firstReadLen, buf)).thenReturn(-1); when(in.hasCapability(anyString())).thenReturn(true); - exception.expect(IOException.class); - exception.expectMessage("EOF"); - BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen); + IOException e = assertThrows(IOException.class, + () -> BlockIOUtils.preadWithExtra(bb, in, position, necessaryLen, extraLen)); + assertThat(e.getMessage(), containsString("EOF")); verify(in).read(position, buf); verify(in).read(firstReadLen, buf);
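Beyond the assertThrows conversion, the EOF tests above fix a latent Mockito problem: two consecutive when(...).thenReturn(...) stubs for the same invocation do not queue answers, the second silently overwrites the first. That is why the dead thenReturn(9) stub is deleted in testPositionalReadPrematureEOF, and why testByteBufferPositionedReadableEOF now stubs in.read(firstReadLen, buf) instead of restubbing in.read(position, buf). A minimal sketch of the three idioms, with a hypothetical single-method interface standing in for the stream:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class StubbingOrderExample { // hypothetical name

  interface Reader { // hypothetical stand-in for the positional-read API
    int read(long position);
  }

  public static void main(String[] args) {
    // Overwrite: the second stub replaces the first; read(0) always returns -1.
    Reader r1 = mock(Reader.class);
    when(r1.read(0)).thenReturn(5);
    when(r1.read(0)).thenReturn(-1);

    // Queue: chained answers for successive calls to the same invocation.
    Reader r2 = mock(Reader.class);
    when(r2.read(0)).thenReturn(5).thenReturn(-1);

    // Distinct invocations, as in the fixed tests: stub each position once.
    Reader r3 = mock(Reader.class);
    when(r3.read(0)).thenReturn(5);
    when(r3.read(5)).thenReturn(-1);
  }
}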
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBytesReadFromFs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBytesReadFromFs.java index d90a48a4be98..45cce7fd8a35 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBytesReadFromFs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBytesReadFromFs.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -26,7 +29,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; @@ -46,17 +48,14 @@ import org.apache.hadoop.hbase.util.BloomFilterUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestBytesReadFromFs { private static final int NUM_KEYS = 100000; private static final int BLOOM_BLOCK_SIZE = 512; @@ -64,13 +63,6 @@ public class TestBytesReadFromFs { private static final int DATA_BLOCK_SIZE = 4096; private static final int ROW_PREFIX_LENGTH_IN_BLOOM_FILTER = 42; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBytesReadFromFs.class); - - @Rule - public TestName name = new TestName(); - private static final Logger LOG = LoggerFactory.getLogger(TestBytesReadFromFs.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Random RNG = new Random(9713312); // Just a fixed seed. @@ -81,7 +73,7 @@ public class TestBytesReadFromFs { private List<byte[]> keyList = new ArrayList<>(); private Path path; - @Before + @BeforeEach public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); conf.setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, ROW_PREFIX_LENGTH_IN_BLOOM_FILTER); @@ -129,7 +121,7 @@ public void testBytesReadFromFsToReadBloomFilterIndexesAndBloomBlocks() throws I KeyValue keyValue = keyValues.get(0); readBloomFilters(path, bloomType, null, keyValue); } else { - Assert.assertEquals(ROW_PREFIX_LENGTH_IN_BLOOM_FILTER, keyList.get(0).length); + assertEquals(ROW_PREFIX_LENGTH_IN_BLOOM_FILTER, keyList.get(0).length); byte[] key = keyList.get(0); readBloomFilters(path, bloomType, key, null); } @@ -238,15 +230,14 @@ private void readDataAndIndexBlocks(Path path, KeyValue keyValue, boolean isScan reader.close(); - Assert.assertEquals(isScanMetricsEnabled, - ThreadLocalServerSideScanMetrics.isScanMetricsEnabled()); + assertEquals(isScanMetricsEnabled, ThreadLocalServerSideScanMetrics.isScanMetricsEnabled()); bytesRead = isScanMetricsEnabled ?
bytesRead : 0; - Assert.assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); - Assert.assertEquals(blockLevelsRead, trailer.getNumDataIndexLevels() + 1); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); + assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); + assertEquals(blockLevelsRead, trailer.getNumDataIndexLevels() + 1); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); // At every index level we read one index block and finally read data block long blockReadOpsCount = isScanMetricsEnabled ? blockLevelsRead : 0; - Assert.assertEquals(blockReadOpsCount, + assertEquals(blockReadOpsCount, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); } @@ -262,9 +253,9 @@ private void readLoadOnOpenDataSection(Path path, boolean hasBloomFilters) throw // Read HFile trailer HFileInfo hfile = new HFileInfo(readerContext, conf); FixedFileTrailer trailer = hfile.getTrailer(); - Assert.assertEquals(trailer.getTrailerSize(), + assertEquals(trailer.getTrailerSize(), ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); - Assert.assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); + assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); CacheConfig cacheConfig = new CacheConfig(conf); HFile.Reader reader = new HFilePreadReader(readerContext, hfile, cacheConfig, conf); @@ -301,8 +292,8 @@ private void readLoadOnOpenDataSection(Path path, boolean hasBloomFilters) throw reader.close(); - Assert.assertEquals(hasBloomFilters, bloomFilterIndexesRead); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); + assertEquals(hasBloomFilters, bloomFilterIndexesRead); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); } private boolean readEachBlockInLoadOnOpenDataSection(HFileBlock block, boolean readNextHeader) @@ -317,14 +308,14 @@ private boolean readEachBlockInLoadOnOpenDataSection(HFileBlock block, boolean r readNextHeader = true; } block.release(); - Assert.assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); - Assert.assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); + assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); + assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); return readNextHeader; } private void readBloomFilters(Path path, BloomType bt, byte[] key, KeyValue keyValue) throws IOException { - Assert.assertTrue(keyValue == null || key == null); + assertTrue(keyValue == null || key == null); // Assert that the bloom filter index was read and its size is accounted in bytes read from // fs @@ -344,7 +335,7 @@ private void readBloomFilters(Path path, BloomType bt, byte[] key, KeyValue keyV StoreFileReader reader = sf.getReader(); BloomFilter bloomFilter = reader.getGeneralBloomFilter(); - Assert.assertTrue(bloomFilter instanceof CompoundBloomFilter); + assertTrue(bloomFilter instanceof CompoundBloomFilter); CompoundBloomFilter cbf = (CompoundBloomFilter) bloomFilter; // Get the bloom filter index reader @@ -366,14 +357,14 @@ private void readBloomFilters(Path path, BloomType bt, byte[] key, KeyValue keyV bytesRead += HFileBlock.headerSize(true); } // Assert that the block read is a bloom block - Assert.assertEquals(bloomBlock.getBlockType(),
BlockType.BLOOM_CHUNK); + assertEquals(bloomBlock.getBlockType(), BlockType.BLOOM_CHUNK); bloomBlock.release(); // Close the reader reader.close(true); - Assert.assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); - Assert.assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); + assertEquals(bytesRead, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); + assertEquals(1, ThreadLocalServerSideScanMetrics.getBlockReadOpsCountAndReset()); } private void writeBloomFilters(Path path, BloomType bt, int bloomBlockByteSize) @@ -385,8 +376,8 @@ private void writeBloomFilters(Path path, BloomType bt, int bloomBlockByteSize) .withCompression(Compression.Algorithm.NONE).build(); StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs).withFileContext(meta) .withBloomType(bt).withFilePath(path).build(); - Assert.assertTrue(w.hasGeneralBloom()); - Assert.assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter); + assertTrue(w.hasGeneralBloom()); + assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter); CompoundBloomFilterWriter cbbf = (CompoundBloomFilterWriter) w.getGeneralBloomWriter(); byte[] cf = Bytes.toBytes("cf"); byte[] cq = Bytes.toBytes("cq"); @@ -400,7 +391,7 @@ private void writeBloomFilters(Path path, BloomType bt, int bloomBlockByteSize) keyList.add(keyBytes); keyValues.add(keyValue); } - Assert.assertEquals(keyList.size(), cbbf.getKeyCount()); + assertEquals(keyList.size(), cbbf.getKeyCount()); w.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index c26c8008a31e..d8c9607ce28f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -42,10 +41,9 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Threads; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,13 +54,10 @@ // (seconds). It is large because it depends on being able to reset the global // blockcache instance which is in a global variable. 
Experience has it that // tests clash on the global variable if this test is run as small sized test. -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestCacheConfig { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCacheConfig.class); - private static final Logger LOG = LoggerFactory.getLogger(TestCacheConfig.class); private Configuration conf; @@ -151,7 +146,7 @@ public BlockType getBlockType() { } } - @Before + @BeforeEach public void setUp() throws Exception { this.conf = HBaseConfiguration.create(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 5bd4497577eb..1d3835aaea58 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.EnumMap; import java.util.HashMap; import java.util.Iterator; @@ -32,12 +31,13 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -60,15 +60,11 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -79,14 +75,11 @@ * Tests {@link HFile} cache-on-write functionality for the following block types: data blocks, * non-root index blocks, and Bloom filter blocks. 
*/ -@RunWith(Parameterized.class) -@Category({ IOTests.class, LargeTests.class }) +@HBaseParameterizedTestTemplate(name = "{0}-{1}-{2}-{3}") +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(LargeTests.TAG) public class TestCacheOnWrite { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCacheOnWrite.class); - private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWrite.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); @@ -188,19 +181,18 @@ private static List<BlockCache> getBlockCaches() throws IOException { return blockcaches; } - @Parameters - public static Collection<Object[]> getParameters() throws IOException { - List<Object[]> params = new ArrayList<>(); + public static Stream<Arguments> parameters() throws IOException { + List<Arguments> params = new ArrayList<>(); for (BlockCache blockCache : getBlockCaches()) { for (CacheOnWriteType cowType : CacheOnWriteType.values()) { for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) { for (boolean cacheCompressedData : new boolean[] { false, true }) { - params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache }); + params.add(Arguments.of(cowType, compress, cacheCompressedData, blockCache)); } } } } - return params; + return params.stream(); } private void clearBlockCache(BlockCache blockCache) throws InterruptedException { @@ -228,7 +220,7 @@ private void clearBlockCache(BlockCache blockCache) throws InterruptedException } } - @Before + @BeforeEach public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); this.conf.set("dfs.datanode.data.dir.perm", "700"); @@ -245,12 +237,12 @@ public void setUp() throws IOException { fs = HFileSystem.get(conf); } - @After + @AfterEach public void tearDown() throws IOException, InterruptedException { clearBlockCache(blockCache); } - @AfterClass + @AfterAll public static void afterClass() throws IOException { TEST_UTIL.cleanupTestDir(); } @@ -271,7 +263,7 @@ private void readStoreFile(boolean useTags) throws IOException { final boolean cacheBlocks = false; final boolean pread = false; HFileScanner scanner = reader.getScanner(conf, cacheBlocks, pread); - assertTrue(testDescription, scanner.seekTo()); + assertTrue(scanner.seekTo(), testDescription); long offset = 0; EnumMap<BlockType, Integer> blockCountByType = new EnumMap<>(BlockType.class); @@ -290,11 +282,10 @@ private void readStoreFile(boolean useTags) throws IOException { cachedBlocksOffset.add(offset);
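This is the heaviest pattern in the migration: @RunWith(Parameterized.class) with a @Parameters collection becomes a static parameters() method returning Stream<Arguments>, test methods move from @Test to @TestTemplate, class-level fixtures move from @BeforeClass/@AfterClass to the static @BeforeAll/@AfterAll, and the project's @HBaseParameterizedTestTemplate annotation (its resolver is not part of this diff) appears to inject each Arguments tuple through the constructor, as the retained constructors elsewhere in the patch suggest. A sketch of the shape under those assumptions (class name and parameter values hypothetical):

import java.util.stream.Stream;

import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.params.provider.Arguments;

@HBaseParameterizedTestTemplate(name = "{0}-{1}")
public class ExampleParameterizedTest {
  private final boolean useTags;
  private final int blockSize;

  // Arguments are injected through the constructor, mirroring the
  // JUnit 4 Parameterized runner this template replaces.
  public ExampleParameterizedTest(boolean useTags, int blockSize) {
    this.useTags = useTags;
    this.blockSize = blockSize;
  }

  // Replaces the @Parameters method that returned Collection<Object[]>.
  public static Stream<Arguments> parameters() {
    return Stream.of(Arguments.of(false, 4096), Arguments.of(true, 8192));
  }

  @TestTemplate
  public void testSomething() {
    // runs once per Arguments entry
  }

  @AfterAll
  public static void afterClass() {
    // class-level teardown: @AfterClass in JUnit 4, must stay static
  }
}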
cachedBlocks.put(offset, fromCache == null ? null : Pair.newPair(block, fromCache)); boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType()); - assertTrue( + assertTrue(shouldBeCached == isCached, "shouldBeCached: " + shouldBeCached + "\n" + "isCached: " + isCached + "\n" + "Test description: " + testDescription + "\n" + "block: " + block + "\n" - + "encodingInCache: " + encodingInCache + "\n" + "blockCacheKey: " + blockCacheKey, - shouldBeCached == isCached); + + "encodingInCache: " + encodingInCache + "\n" + "blockCacheKey: " + blockCacheKey); if (isCached) { if (cacheConf.shouldCacheCompressed(fromCache.getBlockType().getCategory())) { if (compress != Compression.Algorithm.NONE) { @@ -506,30 +497,30 @@ private void testCachingDataBlocksDuringCompactionInternals(boolean useTags, if (cacheOnCompactAndNonBucketCache && cacheBlocksOnCompactionThreshold > 0) { if (cacheBlocksOnCompactionThreshold == CACHE_COMPACTION_HIGH_THRESHOLD) { - assertTrue(assertErrorMessage, dataBlockCached); - assertTrue(assertErrorMessage, bloomBlockCached); - assertTrue(assertErrorMessage, indexBlockCached); + assertTrue(dataBlockCached, assertErrorMessage); + assertTrue(bloomBlockCached, assertErrorMessage); + assertTrue(indexBlockCached, assertErrorMessage); } else { - assertFalse(assertErrorMessage, dataBlockCached); + assertFalse(dataBlockCached, assertErrorMessage); if (localCacheBloomBlocksValue) { - assertTrue(assertErrorMessage, bloomBlockCached); + assertTrue(bloomBlockCached, assertErrorMessage); } else { - assertFalse(assertErrorMessage, bloomBlockCached); + assertFalse(bloomBlockCached, assertErrorMessage); } if (localCacheIndexBlocksValue) { - assertTrue(assertErrorMessage, indexBlockCached); + assertTrue(indexBlockCached, assertErrorMessage); } else { - assertFalse(assertErrorMessage, indexBlockCached); + assertFalse(indexBlockCached, assertErrorMessage); } } } else { - assertEquals(assertErrorMessage, cacheOnCompactAndNonBucketCache, dataBlockCached); + assertEquals(cacheOnCompactAndNonBucketCache, dataBlockCached, assertErrorMessage); if (cacheOnCompactAndNonBucketCache) { - assertTrue(assertErrorMessage, bloomBlockCached); - assertTrue(assertErrorMessage, indexBlockCached); + assertTrue(bloomBlockCached, assertErrorMessage); + assertTrue(indexBlockCached, assertErrorMessage); } } @@ -544,19 +535,19 @@ private void testCachingDataBlocksDuringCompactionInternals(boolean useTags, } } - @Test + @TestTemplate public void testStoreFileCacheOnWrite() throws IOException { testStoreFileCacheOnWriteInternals(false); testStoreFileCacheOnWriteInternals(true); } - @Test + @TestTemplate public void testCachingDataBlocksDuringCompaction() throws IOException, InterruptedException { testCachingDataBlocksDuringCompactionInternals(false, false, -1); testCachingDataBlocksDuringCompactionInternals(true, true, -1); } - @Test + @TestTemplate public void testCachingDataBlocksThresholdDuringCompaction() throws IOException, InterruptedException { testCachingDataBlocksDuringCompactionInternals(false, true, CACHE_COMPACTION_HIGH_THRESHOLD); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheStats.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheStats.java index 75f1f38848f7..1c1924543314 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheStats.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheStats.java @@ -17,22 +17,18 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertEquals; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestCacheStats { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCacheStats.class); - @Test public void testPeriodicMetrics() throws Exception { CacheStats cacheStats = new CacheStats("test", 5, 1, TimeUnit.SECONDS); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java index a821b78ae647..44141eb384bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java @@ -17,23 +17,18 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestCachedBlockQueue { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCachedBlockQueue.class); - @Test public void testQueue() throws Exception { CachedBlock cb1 = new CachedBlock(1000, "cb1", 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java index 053679952091..8626e947333e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java @@ -19,9 +19,9 @@ import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.GZ; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayInputStream; import java.io.DataInputStream; @@ -34,7 +34,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.fs.HFileSystem; @@ -47,20 +46,16 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import 
org.apache.hadoop.hbase.util.ChecksumType; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestChecksum { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestChecksum.class); - private static final Logger LOG = LoggerFactory.getLogger(TestChecksum.class); static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { NONE, GZ }; @@ -71,7 +66,7 @@ public class TestChecksum { private FileSystem fs; private HFileSystem hfs; - @Before + @BeforeEach public void setUp() throws Exception { fs = HFileSystem.get(TEST_UTIL.getConfiguration()); hfs = (HFileSystem) fs; @@ -374,7 +369,7 @@ private void validateData(DataInputStream in) throws IOException { // validate data for (int i = 0; i < 1234; i++) { int val = in.readInt(); - assertEquals("testChecksumCorruption: data mismatch at index " + i, i, val); + assertEquals(i, val, "testChecksumCorruption: data mismatch at index " + i); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java index 8f78c5fb418d..d88f4c242e0e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java @@ -20,28 +20,24 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY; import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY; import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.CombinedCacheStats; import org.apache.hadoop.hbase.nio.ByteBuff; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestCombinedBlockCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCombinedBlockCache.class); - private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @Test @@ -138,7 +134,7 @@ private CombinedBlockCache createCombinedBlockCache() { conf.set(BUCKET_CACHE_IOENGINE_KEY, "offheap"); conf.setInt(BUCKET_CACHE_SIZE_KEY, 32); BlockCache blockCache = BlockCacheFactory.createBlockCache(conf); - Assert.assertTrue(blockCache instanceof CombinedBlockCache); + assertTrue(blockCache instanceof CombinedBlockCache); return (CombinedBlockCache) blockCache; }
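Several hunks here also add a second tag (MiscTests, RegionServerTests) where the JUnit 4 class carried only one category, since @Tag is repeatable. With the category classes gone, build-time selection keys off the tag strings instead; one way to reproduce a category-style grouping is a JUnit Platform suite. The sketch below assumes the junit-platform-suite engine is on the test classpath (it is not added by this diff) and reuses a real TAG constant, which is a compile-time String constant and therefore a legal annotation value:

import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.platform.suite.api.IncludeTags;
import org.junit.platform.suite.api.SelectPackages;
import org.junit.platform.suite.api.Suite;

// Hypothetical suite: runs every test under the hfile package that
// carries the SmallTests tag, much like the old @Category filtering.
@Suite
@SelectPackages("org.apache.hadoop.hbase.io.hfile")
@IncludeTags(SmallTests.TAG)
public class SmallHFileTestsSuite {
}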
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java index 8dd7a5bbb890..2e0056001970 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -28,15 +29,15 @@ import java.io.DataOutputStream; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparatorImpl; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.InnerStoreCellComparator; import org.apache.hadoop.hbase.KeyValue; @@ -44,28 +45,20 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos; -@RunWith(Parameterized.class) -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: version={0}") public class TestFixedFileTrailer { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFixedFileTrailer.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class); private static final int MAX_COMPARATOR_NAME_LENGTH = 128; @@ -88,23 +81,19 @@ public TestFixedFileTrailer(int version) { this.version = version; } - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @Parameters - public static Collection<Object[]> getParameters() { + public static Stream<Arguments> parameters() { List<Integer[]> versionsToTest = new ArrayList<>(); for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v) versionsToTest.add(new Integer[] { v }); - return versionsToTest; + return versionsToTest.stream().map(arr -> Arguments.of(arr[0])); }
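TestFixedFileTrailer's provider builds its version matrix with an explicit loop over Integer[] entries; the same provider can be written more directly as an IntStream, producing one Arguments per HFile format version, which the template's "{index}: version={0}" name pattern then renders per invocation. A compact equivalent, under the same assumptions about the template as above (the holder class is hypothetical, the HFile constants are real):

import java.util.stream.IntStream;
import java.util.stream.Stream;

import org.apache.hadoop.hbase.io.hfile.HFile;
import org.junit.jupiter.params.provider.Arguments;

public class VersionMatrixExample { // hypothetical holder class
  public static Stream<Arguments> parameters() {
    // One Arguments per supported HFile format version.
    return IntStream.rangeClosed(HFile.MIN_FORMAT_VERSION, HFile.MAX_FORMAT_VERSION)
      .mapToObj(v -> Arguments.of(v));
  }
}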
- @Before + @BeforeEach public void setUp() throws IOException { fs = FileSystem.get(util.getConfiguration()); } - @Test + @TestTemplate public void testComparatorIsHBase1Compatible() { FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION); t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass()); @@ -116,7 +105,7 @@ public void testComparatorIsHBase1Compatible() { assertEquals(KeyValue.META_COMPARATOR.getClass().getName(), pb.getComparatorClassName()); } - @Test + @TestTemplate public void testCreateComparator() throws IOException { FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION); try { @@ -149,12 +138,11 @@ public void testCreateComparator() throws IOException { } // Test an invalid comparatorClassName - expectedEx.expect(IOException.class); - t.createComparator(""); + assertThrows(IOException.class, () -> t.createComparator("")); } - @Test + @TestTemplate public void testTrailer() throws IOException { FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION); t.setDataIndexCount(3); @@ -211,8 +199,8 @@ public void testTrailer() throws IOException { String msg = ex.getMessage(); String cleanMsg = msg.replaceAll("^(java(\\.[a-zA-Z]+)+:\\s+)?|\\s+\\(.*\\)\\s*$", ""); // will be followed by " expected: ..." - assertEquals("Actual exception message is \"" + msg + "\".\nCleaned-up message", - "Invalid HFile version: " + invalidVersion, cleanMsg); + assertEquals("Invalid HFile version: " + invalidVersion, cleanMsg, + "Actual exception message is \"" + msg + "\".\nCleaned-up message"); LOG.info("Got an expected exception: " + msg); } } @@ -227,13 +215,12 @@ public void testTrailer() throws IOException { checkLoadedTrailer(version, t, t4); String trailerStr = t.toString(); - assertEquals( - "Invalid number of fields in the string representation " + "of the trailer: " + trailerStr, - NUM_FIELDS_BY_VERSION[version - 2], trailerStr.split(", ").length); + assertEquals(NUM_FIELDS_BY_VERSION[version - 2], trailerStr.split(", ").length, + "Invalid number of fields in the string representation " + "of the trailer: " + trailerStr); assertEquals(trailerStr, t4.toString()); } - @Test + @TestTemplate public void testTrailerForV2NonPBCompatibility() throws Exception { if (version == 2) { FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.MINOR_VERSION_NO_CHECKSUM); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java index 346fd1acef2b..99b95be5f690 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -36,13 +35,10 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; /** * Make sure we always cache important block types, such as index blocks, as long as we have a block @@ -52,15 +48,12 @@ * to reveal more about what is being cached whether DATA or INDEX blocks and then we could do more * verification in this test. */ -@Category({ IOTests.class, MediumTests.class }) -@RunWith(Parameterized.class) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: hfileVersion={0}, cfCacheEnabled={1}") public class TestForceCacheImportantBlocks { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestForceCacheImportantBlocks.class); - - private final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); + private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final String TABLE = "myTable"; private static final String CF = "myCF"; @@ -84,10 +77,8 @@ public class TestForceCacheImportantBlocks { private final int hfileVersion; private final boolean cfCacheEnabled; - @Parameters - public static Collection<Object[]> parameters() { - // HFile versions - return Arrays.asList(new Object[] { 3, true }, new Object[] { 3, false }); + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of(3, true), Arguments.of(3, false)); } public TestForceCacheImportantBlocks(int hfileVersion, boolean cfCacheEnabled) { @@ -96,12 +87,12 @@ public TestForceCacheImportantBlocks(int hfileVersion, boolean cfCacheEnabled) { TEST_UTIL.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, hfileVersion); } - @Before + @BeforeEach public void setup() { HFile.DATABLOCK_READ_COUNT.reset(); } - @Test + @TestTemplate public void testCacheBlocks() throws IOException { // Set index block size to be the same as normal block size.
TEST_UTIL.getConfiguration().setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, BLOCK_SIZE); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index 5a4a76b76e70..f3602e070fde 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -24,11 +24,12 @@ import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MIN_ALLOCATE_SIZE_KEY; import static org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.BLOCKCACHE_POLICY_KEY; import static org.apache.hadoop.hbase.io.hfile.CacheConfig.EVICT_BLOCKS_ON_CLOSE_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.DataInput; import java.io.DataOutput; @@ -57,7 +58,6 @@ import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -85,13 +85,9 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,15 +97,10 @@ /** * test hfile features. 
*/ -@Category({ IOTests.class, SmallTests.class }) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(SmallTests.TAG) public class TestHFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHFile.class); - - @Rule - public TestName testName = new TestName(); - private static final Logger LOG = LoggerFactory.getLogger(TestHFile.class); private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -120,7 +111,7 @@ public class TestHFile { private static Configuration conf; private static FileSystem fs; - @BeforeClass + @BeforeAll public static void setUp() throws Exception { conf = TEST_UTIL.getConfiguration(); cacheConf = new CacheConfig(conf); @@ -155,10 +146,10 @@ private void fillByteBuffAllocator(ByteBuffAllocator alloc, int bufCount) { List<ByteBuff> buffs = new ArrayList<>(); for (int i = 0; i < bufCount; i++) { buffs.add(alloc.allocateOneBuffer()); - Assert.assertEquals(alloc.getFreeBufferCount(), 0); + assertEquals(alloc.getFreeBufferCount(), 0); } buffs.forEach(ByteBuff::release); - Assert.assertEquals(alloc.getFreeBufferCount(), bufCount); + assertEquals(alloc.getFreeBufferCount(), bufCount); } @Test @@ -170,7 +161,7 @@ public void testReaderWithoutBlockCache() throws Exception { // start write to store file. Path path = writeStoreFile(); readStoreFile(path, conf, alloc); - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); } @@ -194,15 +185,15 @@ public void testReaderWithLRUBlockCache() throws Exception { offset += block.getOnDiskSizeWithHeader(); // Ensure the block is a heap one. Cacheable cachedBlock = lru.getBlock(key, false, false, true); - Assert.assertNotNull(cachedBlock); - Assert.assertTrue(cachedBlock instanceof HFileBlock); - Assert.assertFalse(((HFileBlock) cachedBlock).isSharedMem()); + assertNotNull(cachedBlock); + assertTrue(cachedBlock instanceof HFileBlock); + assertFalse(((HFileBlock) cachedBlock).isSharedMem()); // Should never allocate off-heap block from allocator because ensure that it's LRU. - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); block.release(); // return the ByteBuffer back to the allocator. } reader.close(); - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); lru.shutdown(); } @@ -272,7 +263,7 @@ private void assertBytesReadFromCache(boolean isScanMetricsEnabled, DataBlockEnc // Initialize the block cache and HFile reader BlockCache lru = BlockCacheFactory.createBlockCache(conf); - Assert.assertTrue(lru instanceof LruBlockCache); + assertTrue(lru instanceof LruBlockCache); CacheConfig cacheConfig = new CacheConfig(conf, null, lru, ByteBuffAllocator.HEAP); HFileReaderImpl reader = (HFileReaderImpl) HFile.createReader(fs, storeFilePath, cacheConfig, true, conf); @@ -281,34 +272,34 @@ private void assertBytesReadFromCache(boolean isScanMetricsEnabled, DataBlockEnc final int offset = 0; BlockCacheKey cacheKey = new BlockCacheKey(storeFilePath.getName(), offset); HFileBlock block = (HFileBlock) lru.getBlock(cacheKey, false, false, true); - Assert.assertNull(block); + assertNull(block); // Assert that first block has not been cached in the block cache and no disk I/O happened to // check that.
ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset(); ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset(); block = reader.getCachedBlock(cacheKey, false, false, true, BlockType.DATA, null); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); // Read the first block from the HFile. block = reader.readBlock(offset, -1, true, true, false, true, BlockType.DATA, null); - Assert.assertNotNull(block); + assertNotNull(block); int bytesReadFromFs = block.getOnDiskSizeWithHeader(); if (block.getNextBlockOnDiskSize() > 0) { bytesReadFromFs += block.headerSize(); } block.release(); // Assert that disk I/O happened to read the first block. - Assert.assertEquals(isScanMetricsEnabled ? bytesReadFromFs : 0, + assertEquals(isScanMetricsEnabled ? bytesReadFromFs : 0, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); // Read the first block again and assert that it has been cached in the block cache. block = reader.getCachedBlock(cacheKey, false, false, true, BlockType.DATA, encoding); long bytesReadFromCache = 0; if (encoding == DataBlockEncoding.NONE) { - Assert.assertNotNull(block); + assertNotNull(block); bytesReadFromCache = block.getOnDiskSizeWithHeader(); if (block.getNextBlockOnDiskSize() > 0) { bytesReadFromCache += block.headerSize(); @@ -316,13 +307,13 @@ private void assertBytesReadFromCache(boolean isScanMetricsEnabled, DataBlockEnc block.release(); // Assert that bytes read from block cache account for same number of bytes that would have // been read from FS if block cache wasn't there. - Assert.assertEquals(bytesReadFromFs, bytesReadFromCache); + assertEquals(bytesReadFromFs, bytesReadFromCache); } else { - Assert.assertNull(block); + assertNull(block); } - Assert.assertEquals(isScanMetricsEnabled ? bytesReadFromCache : 0, + assertEquals(isScanMetricsEnabled ? bytesReadFromCache : 0, ThreadLocalServerSideScanMetrics.getBytesReadFromBlockCacheAndReset()); - Assert.assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); + assertEquals(0, ThreadLocalServerSideScanMetrics.getBytesReadFromFsAndReset()); reader.close(); } @@ -351,8 +342,8 @@ private BlockCache initCombinedBlockCache(final String l1CachePolicy) { that.set(BUCKET_CACHE_IOENGINE_KEY, "offheap"); that.set(BLOCKCACHE_POLICY_KEY, l1CachePolicy); BlockCache bc = BlockCacheFactory.createBlockCache(that); - Assert.assertNotNull(bc); - Assert.assertTrue(bc instanceof CombinedBlockCache); + assertNotNull(bc); + assertTrue(bc instanceof CombinedBlockCache); return bc; } @@ -378,15 +369,15 @@ public void testReaderWithCombinedBlockCache() throws Exception { // Read the cached block. Cacheable cachedBlock = combined.getBlock(key, false, false, true); try { - Assert.assertNotNull(cachedBlock); - Assert.assertTrue(cachedBlock instanceof HFileBlock); + assertNotNull(cachedBlock); + assertTrue(cachedBlock instanceof HFileBlock); HFileBlock hfb = (HFileBlock) cachedBlock; // Data block will be cached in BucketCache, so it should be an off-heap block. 
if (hfb.getBlockType().isData()) { - Assert.assertTrue(hfb.isSharedMem()); + assertTrue(hfb.isSharedMem()); } else { // Non-data block will be cached in LRUBlockCache, so it must be an on-heap block. - Assert.assertFalse(hfb.isSharedMem()); + assertFalse(hfb.isSharedMem()); } } finally { cachedBlock.release(); @@ -395,7 +386,7 @@ public void testReaderWithCombinedBlockCache() throws Exception { } reader.close(); combined.shutdown(); - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); } @@ -451,7 +442,7 @@ private void testReaderBlockAllocationWithLRUCache(boolean cacheConfigCacheBlock } reader.close(); - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); lru.shutdown(); } @@ -511,7 +502,7 @@ private void testReaderBlockAllocationWithCombinedCache(boolean cacheConfigCache reader.close(); combined.shutdown(); - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); } @@ -528,13 +519,13 @@ private long readAtOffsetWithAllocationAsserts(ByteBuffAllocator alloc, HFile.Re throw e; } - Assert.assertEquals(expectSharedMem, block.isSharedMem()); + assertEquals(expectSharedMem, block.isSharedMem()); if (expectSharedMem) { - Assert.assertTrue(alloc.getFreeBufferCount() < alloc.getTotalBufferCount()); + assertTrue(alloc.getFreeBufferCount() < alloc.getTotalBufferCount()); } else { // Should never allocate off-heap block from allocator because ensure that it's LRU. - Assert.assertEquals(alloc.getTotalBufferCount(), alloc.getFreeBufferCount()); + assertEquals(alloc.getTotalBufferCount(), alloc.getFreeBufferCount()); } try { @@ -596,8 +587,8 @@ public static KeyValue.Type generateKeyType(Random rand) { * Test empty HFile. Test all features work reasonably when hfile is empty of entries. 
*/ @Test - public void testEmptyHFile() throws IOException { - Path f = new Path(ROOT_DIR, testName.getMethodName()); + public void testEmptyHFile(TestInfo testInfo) throws IOException { + Path f = new Path(ROOT_DIR, testInfo.getTestMethod().get().getName()); HFileContext context = new HFileContextBuilder().withIncludesTags(false).build(); Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create(); @@ -611,8 +602,8 @@ public void testEmptyHFile() throws IOException { * Create 0-length hfile and show that it fails */ @Test - public void testCorrupt0LengthHFile() throws IOException { - Path f = new Path(ROOT_DIR, testName.getMethodName()); + public void testCorrupt0LengthHFile(TestInfo testInfo) throws IOException { + Path f = new Path(ROOT_DIR, testInfo.getTestMethod().get().getName()); FSDataOutputStream fsos = fs.create(f); fsos.close(); @@ -626,13 +617,13 @@ public void testCorrupt0LengthHFile() throws IOException { } @Test - public void testCorruptOutOfOrderHFileWrite() throws IOException { - Path path = new Path(ROOT_DIR, testName.getMethodName()); + public void testCorruptOutOfOrderHFileWrite(TestInfo testInfo) throws IOException { + Path path = new Path(ROOT_DIR, testInfo.getTestMethod().get().getName()); FSDataOutputStream mockedOutputStream = Mockito.mock(FSDataOutputStream.class); String columnFamily = "MyColumnFamily"; String tableName = "MyTableName"; HFileContext fileContext = - new HFileContextBuilder().withHFileName(testName.getMethodName() + "HFile") + new HFileContextBuilder().withHFileName(testInfo.getTestMethod().get().getName() + "HFile") .withBlockSize(minBlockSize).withColumnFamily(Bytes.toBytes(columnFamily)) .withTableName(Bytes.toBytes(tableName)).withHBaseCheckSum(false) .withCompression(Compression.Algorithm.NONE).withCompressTags(false).build(); @@ -655,12 +646,12 @@ public void testCorruptOutOfOrderHFileWrite() throws IOException { writer.append(secondCell); } catch (IOException ie) { String message = ie.getMessage(); - Assert.assertTrue(message.contains("not lexically larger")); - Assert.assertTrue(message.contains(tableName)); - Assert.assertTrue(message.contains(columnFamily)); + assertTrue(message.contains("not lexically larger")); + assertTrue(message.contains(tableName)); + assertTrue(message.contains(columnFamily)); return; } - Assert.fail("Exception wasn't thrown even though Cells were appended in the wrong order!"); + fail("Exception wasn't thrown even though Cells were appended in the wrong order!"); } public static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException { @@ -682,8 +673,8 @@ public static void truncateFile(FileSystem fs, Path src, Path dst) throws IOExce * Create a truncated hfile and verify that exception thrown. 
*/ @Test - public void testCorruptTruncatedHFile() throws IOException { - Path f = new Path(ROOT_DIR, testName.getMethodName()); + public void testCorruptTruncatedHFile(TestInfo testInfo) throws IOException { + Path f = new Path(ROOT_DIR, testInfo.getTestMethod().get().getName()); HFileContext context = new HFileContextBuilder().build(); Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(this.fs, f).withFileContext(context) .create(); @@ -743,11 +734,11 @@ private int readAndCheckbytes(HFileScanner scanner, int start, int n) throws IOE Bytes.toBytes("qual"), Bytes.toBytes(valStr)); byte[] keyBytes = new KeyValue.KeyOnlyKeyValue(Bytes.toBytes(key), 0, Bytes.toBytes(key).length).getKey(); - assertTrue("bytes for keys do not match " + keyStr + " " + Bytes.toString(Bytes.toBytes(key)), - Arrays.equals(kv.getKey(), keyBytes)); + assertTrue(Arrays.equals(kv.getKey(), keyBytes), + "bytes for keys do not match " + keyStr + " " + Bytes.toString(Bytes.toBytes(key))); byte[] valBytes = Bytes.toBytes(val); - assertTrue("bytes for vals do not match " + valStr + " " + Bytes.toString(valBytes), - Arrays.equals(Bytes.toBytes(valStr), valBytes)); + assertTrue(Arrays.equals(Bytes.toBytes(valStr), valBytes), + "bytes for vals do not match " + valStr + " " + Bytes.toString(valBytes)); if (!scanner.next()) { break; } @@ -792,7 +783,7 @@ void basicWithSomeCodec(String codec, boolean useTags) throws IOException { FSDataInputStream fin = fs.open(ncHFile); ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, ncHFile).build(); Reader reader = createReaderFromStream(context, cacheConf, conf); - System.out.println(cacheConf.toString()); + LOG.info(cacheConf.toString()); // Load up the index. // Get a scanner that caches and that does not use pread. 
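
JUnit 5 drops the @Rule TestName mechanism, so the converted tests above take a TestInfo parameter that Jupiter injects into test and lifecycle methods. getTestMethod() returns an Optional, which is why the rewritten code calls get() on it. A small sketch (hypothetical test class, not from this patch):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

public class MethodNameExample {

  @Test
  public void testUsesOwnName(TestInfo testInfo) {
    // Equivalent of JUnit 4's testName.getMethodName().
    String name = testInfo.getTestMethod().get().getName();
    assertEquals("testUsesOwnName", name);
  }
}
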
HFileScanner scanner = reader.getScanner(conf, true, false); @@ -800,12 +791,12 @@ void basicWithSomeCodec(String codec, boolean useTags) throws IOException { scanner.seekTo(); readAllRecords(scanner); int seekTo = scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))); - System.out.println(seekTo); - assertTrue("location lookup failed", - scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))) == 0); + LOG.info("" + seekTo); + assertTrue(scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))) == 0, + "location lookup failed"); // read the key and see if it matches ByteBuffer readKey = ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()); - assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey))); + assertTrue(Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey)), "seeked key does not match"); scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0))); ByteBuffer val1 = scanner.getValue(); @@ -858,9 +849,9 @@ private void someTestingWithMetaBlock(Writer writer) { private void readNumMetablocks(Reader reader, int n) throws IOException { for (int i = 0; i < n; i++) { ByteBuff actual = reader.getMetaBlock("HFileMeta" + i, false).getBufferWithoutHeader(); - ByteBuffer expected = ByteBuffer.wrap(("something to test" + i).getBytes()); - assertEquals("failed to match metadata", Bytes.toStringBinary(expected), Bytes.toStringBinary( - actual.array(), actual.arrayOffset() + actual.position(), actual.capacity())); + ByteBuffer expected = ByteBuffer.wrap(Bytes.toBytes("something to test" + i)); + assertEquals(Bytes.toStringBinary(expected), Bytes.toStringBinary(actual.array(), + actual.arrayOffset() + actual.position(), actual.capacity()), "failed to match metadata"); } } @@ -1108,13 +1099,13 @@ public void testGetShortMidpoint() { } @Test - public void testDBEShipped() throws IOException { + public void testDBEShipped(TestInfo testInfo) throws IOException { for (DataBlockEncoding encoding : DataBlockEncoding.values()) { DataBlockEncoder encoder = encoding.getEncoder(); if (encoder == null) { continue; } - Path f = new Path(ROOT_DIR, testName.getMethodName() + "_" + encoding); + Path f = new Path(ROOT_DIR, testInfo.getTestMethod().get().getName() + "_" + encoding); HFileContext context = new HFileContextBuilder().withIncludesTags(false).withDataBlockEncoding(encoding).build(); HFileWriterImpl writer = (HFileWriterImpl) HFile.getWriterFactory(conf, cacheConf) @@ -1187,14 +1178,14 @@ private void testReaderCombinedCache(final String l1CachePolicy) throws Exceptio // Read the cached block. cachedBlock = combined.getBlock(key, false, false, true); try { - Assert.assertNotNull(cachedBlock); - Assert.assertTrue(cachedBlock instanceof HFileBlock); + assertNotNull(cachedBlock); + assertTrue(cachedBlock instanceof HFileBlock); HFileBlock hfb = (HFileBlock) cachedBlock; // Data block will be cached in BucketCache, so it should be an off-heap block. 
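
The assertion rewrites above all apply one mechanical rule: JUnit 4 took the failure message as the first argument, while the Jupiter assertions take it as the last. A compact illustration (hypothetical values, not code from this patch):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class MessageOrderExample {

  static void check(int seekResult, String expected, String actual) {
    // JUnit 4: assertTrue("location lookup failed", seekResult == 0);
    assertTrue(seekResult == 0, "location lookup failed");
    // JUnit 4: assertEquals("failed to match metadata", expected, actual);
    assertEquals(expected, actual, "failed to match metadata");
  }
}
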
if (hfb.getBlockType().isData()) { - Assert.assertTrue(hfb.isSharedMem()); + assertTrue(hfb.isSharedMem()); } else if (!l1CachePolicy.equals("TinyLfu")) { - Assert.assertFalse(hfb.isSharedMem()); + assertFalse(hfb.isSharedMem()); } } finally { cachedBlock.release(); @@ -1204,9 +1195,9 @@ private void testReaderCombinedCache(final String l1CachePolicy) throws Exceptio reader.close(); combined.shutdown(); if (cachedBlock != null) { - Assert.assertEquals(0, cachedBlock.refCnt()); + assertEquals(0, cachedBlock.refCnt()); } - Assert.assertEquals(bufCount, alloc.getFreeBufferCount()); + assertEquals(bufCount, alloc.getFreeBufferCount()); alloc.clean(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 33788b692b92..1116ce8f3ee6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -20,7 +20,11 @@ import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.GZ; import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; @@ -28,7 +32,6 @@ import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -41,6 +44,7 @@ import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Executors; import java.util.concurrent.Future; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -49,8 +53,8 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -72,26 +76,20 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.compress.Compressor; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, LargeTests.class }) -@RunWith(Parameterized.class) +@org.junit.jupiter.api.Tag(IOTests.TAG) 
+@org.junit.jupiter.api.Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate( + name = "{index}: includesMemstoreTS={0}, includesTag={1}, useHeapAllocator={2}") public class TestHFileBlock { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileBlock.class); - // change this value to activate more logs private static final boolean detailedLogging = false; private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true }; @@ -126,18 +124,15 @@ public TestHFileBlock(boolean includesMemstoreTS, boolean includesTag, boolean u assertAllocator(); } - @Parameters - public static Collection parameters() { - List params = new ArrayList<>(); - // Generate boolean triples from 000 to 111 + public static Stream parameters() { + List params = new ArrayList<>(); for (int i = 0; i < (1 << 3); i++) { - Object[] flags = new Boolean[3]; - for (int k = 0; k < 3; k++) { - flags[k] = (i & (1 << k)) != 0; - } - params.add(flags); + boolean v0 = (i & (1 << 0)) != 0; + boolean v1 = (i & (1 << 1)) != 0; + boolean v2 = (i & (1 << 2)) != 0; + params.add(Arguments.of(v0, v1, v2)); } - return params; + return params.stream(); } private ByteBuffAllocator createOffHeapAlloc() { @@ -162,12 +157,12 @@ private void assertAllocator() { } } - @Before + @BeforeEach public void setUp() throws IOException { fs = HFileSystem.get(TEST_UTIL.getConfiguration()); } - @After + @AfterEach public void tearDown() throws IOException { assertAllocator(); alloc.clean(); @@ -290,7 +285,7 @@ public String createTestBlockStr(Compression.Algorithm algo, int correctLength, return Bytes.toStringBinary(testV2Block); } - @Test + @TestTemplate public void testNoCompression() throws IOException { CacheConfig cacheConf = Mockito.mock(CacheConfig.class); Mockito.when(cacheConf.getBlockCache()).thenReturn(Optional.empty()); @@ -302,7 +297,7 @@ public void testNoCompression() throws IOException { assertTrue(block.isUnpacked()); } - @Test + @TestTemplate public void testGzipCompression() throws IOException { // @formatter:off String correctTestBlockStr = "DATABLK*\\x00\\x00\\x00>\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF" @@ -333,7 +328,7 @@ public void testGzipCompression() throws IOException { testBlockStr.substring(0, correctGzipBlockLength - 4)); } - @Test + @TestTemplate public void testReaderV2() throws IOException { testReaderV2Internals(); } @@ -406,10 +401,9 @@ protected void testReaderV2Internals() throws IOException { fail("Exception expected"); } catch (IOException ex) { String expectedPrefix = "Passed in onDiskSizeWithHeader="; - assertTrue( + assertTrue(ex.getMessage().startsWith(expectedPrefix), "Invalid exception message: '" + ex.getMessage() - + "'.\nMessage is expected to start with: '" + expectedPrefix + "'", - ex.getMessage().startsWith(expectedPrefix)); + + "'.\nMessage is expected to start with: '" + expectedPrefix + "'"); } assertRelease(b); is.close(); @@ -423,7 +417,7 @@ protected void testReaderV2Internals() throws IOException { * Test encoding/decoding data blocks. * @throws IOException a bug or a problem with temporary files. 
*/ - @Test + @TestTemplate public void testDataBlockEncoding() throws IOException { testInternals(); } @@ -497,18 +491,17 @@ private void testInternals() throws IOException { LOG.info("packedHeapsize=" + packedHeapsize + ", unpackedHeadsize=" + blockUnpacked.heapSize()); assertFalse(packedHeapsize == blockUnpacked.heapSize()); - assertTrue("Packed heapSize should be < unpacked heapSize", - packedHeapsize < blockUnpacked.heapSize()); + assertTrue(packedHeapsize < blockUnpacked.heapSize(), + "Packed heapSize should be < unpacked heapSize"); } ByteBuff actualBuffer = blockUnpacked.getBufferWithoutHeader(); if (encoding != DataBlockEncoding.NONE) { // We expect a two-byte big-endian encoding id. - assertEquals( - "Unexpected first byte with " + buildMessageDetails(algo, encoding, pread), - Long.toHexString(0), Long.toHexString(actualBuffer.get(0))); - assertEquals( - "Unexpected second byte with " + buildMessageDetails(algo, encoding, pread), - Long.toHexString(encoding.getId()), Long.toHexString(actualBuffer.get(1))); + assertEquals(Long.toHexString(0), Long.toHexString(actualBuffer.get(0)), + "Unexpected first byte with " + buildMessageDetails(algo, encoding, pread)); + assertEquals(Long.toHexString(encoding.getId()), + Long.toHexString(actualBuffer.get(1)), + "Unexpected second byte with " + buildMessageDetails(algo, encoding, pread)); actualBuffer.position(2); actualBuffer = actualBuffer.slice(); } @@ -525,12 +518,12 @@ private void testInternals() throws IOException { blockFromHFile.serialize(serialized, true); HFileBlock deserialized = (HFileBlock) blockFromHFile.getDeserializer() .deserialize(new SingleByteBuff(serialized), HEAP); - assertEquals("Serialization did not preserve block state. reuseBuffer=" + reuseBuffer, - blockFromHFile, deserialized); + assertEquals(blockFromHFile, deserialized, + "Serialization did not preserve block state. reuseBuffer=" + reuseBuffer); // intentional reference comparison if (blockFromHFile != blockUnpacked) { - assertEquals("Deserialized block cannot be unpacked correctly.", blockUnpacked, - deserialized.unpack(meta, hbr)); + assertEquals(blockUnpacked, deserialized.unpack(meta, hbr), + "Deserialized block cannot be unpacked correctly."); } } assertRelease(blockUnpacked); @@ -575,7 +568,7 @@ private static String nextBytesToStr(ByteBuff buf, int pos) { + (numBytes < maxBytes ? "..." 
: ""); } - @Test + @TestTemplate public void testPreviousOffset() throws IOException { testPreviousOffsetInternals(); } @@ -620,10 +613,11 @@ protected void testPreviousOffsetInternals() throws IOException { if (detailedLogging) { LOG.info("Block #" + i + ": " + b); } - assertEquals("Invalid block #" + i + "'s type:", expectedTypes.get(i), - b.getBlockType()); - assertEquals("Invalid previous block offset for block " + i + " of " + "type " - + b.getBlockType() + ":", (long) expectedPrevOffsets.get(i), b.getPrevBlockOffset()); + assertEquals(expectedTypes.get(i), b.getBlockType(), + "Invalid block #" + i + "'s type:"); + assertEquals((long) expectedPrevOffsets.get(i), b.getPrevBlockOffset(), + "Invalid previous block offset for block " + i + " of " + "type " + b.getBlockType() + + ":"); b.sanityCheck(); assertEquals(curOffset, b.getOffset()); @@ -679,7 +673,7 @@ protected void testPreviousOffsetInternals() throws IOException { LOG.warn(wrongBytesMsg); } } - assertTrue(wrongBytesMsg, bytesAreCorrect); + assertTrue(bytesAreCorrect, wrongBytesMsg); assertRelease(newBlock); if (newBlock != b) { assertRelease(b); @@ -768,7 +762,7 @@ public Boolean call() throws Exception { } } - @Test + @TestTemplate public void testConcurrentReading() throws Exception { testConcurrentReadingInternals(); } @@ -869,7 +863,7 @@ private long writeBlocks(Configuration conf, Random rand, Compression.Algorithm return totalSize; } - @Test + @TestTemplate public void testBlockHeapSize() { testBlockHeapSizeInternals(); } @@ -896,15 +890,14 @@ protected void testBlockHeapSizeInternals() { long hfileMetaSize = ClassSize.align(ClassSize.estimateBase(HFileContext.class, true)); long hfileBlockExpectedSize = ClassSize.align(ClassSize.estimateBase(HFileBlock.class, true)); long expected = hfileBlockExpectedSize + byteBufferExpectedSize + hfileMetaSize; - assertEquals( + assertEquals(expected, block.heapSize(), "Block data size: " + size + ", byte buffer expected " + "size: " + byteBufferExpectedSize + ", HFileBlock class expected " + "size: " + hfileBlockExpectedSize - + " HFileContext class expected size: " + hfileMetaSize + "; ", - expected, block.heapSize()); + + " HFileContext class expected size: " + hfileMetaSize + "; "); } } - @Test + @TestTemplate public void testSerializeWithoutNextBlockMetadata() { int size = 100; int length = HConstants.HFILEBLOCK_HEADER_SIZE + size; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index 9a93829266f8..f6e10f5cf39a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.DataOutput; @@ -28,12 +28,12 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Stream; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -41,8 +41,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -66,29 +66,23 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector; -@RunWith(Parameterized.class) -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: compr={0}") public class TestHFileBlockIndex { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileBlockIndex.class); - - @Parameters - public static Collection compressionAlgorithms() { - return HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED; + public static Stream parameters() { + return HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED.stream() + .map(Arguments::of); } public TestHFileBlockIndex(Compression.Algorithm compr) { @@ -124,7 +118,7 @@ public TestHFileBlockIndex(Compression.Algorithm compr) { assert INDEX_CHUNK_SIZES.length == UNCOMPRESSED_INDEX_SIZES.length; } - @Before + @BeforeEach public void setUp() throws IOException { keys.clear(); firstKeyInFile = null; @@ -137,7 +131,7 @@ public void setUp() throws IOException { fs = HFileSystem.get(conf); } - @Test + @TestTemplate public void testBlockIndex() throws IOException { testBlockIndexInternals(false); clear(); @@ -175,7 +169,7 @@ private void writeDataBlocksAndCreateIndex(HFileBlock.Writer hbw, FSDataOutputSt outputStream.close(); } - @Test + @TestTemplate public void testBlockIndexWithOffHeapBuffer() throws Exception { ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID); path = new Path(TEST_UTIL.getDataTestDir(), "block_index_testBlockIndexWithOffHeapBuffer"); @@ -218,7 +212,7 @@ public void onLeak(String s, String s1) { assertEquals(0, counter.get()); } - @Test + @TestTemplate public void testIntermediateIndexCacheOnWriteDoesNotLeak() throws Exception { Configuration localConf = new Configuration(TEST_UTIL.getConfiguration()); localConf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MAX_FORMAT_VERSION); @@ -383,7 +377,7 @@ private void readIndex(boolean useTags) throws IOException { String keyStr = "key #" + i + ", " + Bytes.toStringBinary(key); - assertTrue("seekToDataBlock failed for " + keyStr, b != null); + assertTrue(b != null, "seekToDataBlock failed for " + keyStr); if (prevOffset == b.getOffset()) { assertEquals(++expectedHitCount, brw.hitCount); @@ -438,7 +432,7 
@@ private static final int getDummyOnDiskSize(int i) { return i * i * 37 + i * 19 + 13; } - @Test + @TestTemplate public void testSecondaryIndexBinarySearch() throws IOException { int numTotalKeys = 99; assertTrue(numTotalKeys % 2 == 1); // Ensure no one made this even. @@ -537,7 +531,7 @@ public void testSecondaryIndexBinarySearch() throws IOException { referenceItem = i - 1; } - assertEquals(lookupFailureMsg, expectedResult, searchResult); + assertEquals(expectedResult, searchResult, lookupFailureMsg); // Now test we can get the offset and the on-disk-size using a // higher-level API function.s @@ -550,14 +544,14 @@ public void testSecondaryIndexBinarySearch() throws IOException { } else { assertTrue(locateBlockResult); String errorMsg = "i=" + i + ", position=" + nonRootIndex.position(); - assertEquals(errorMsg, getDummyFileOffset(referenceItem), nonRootIndex.getLong()); - assertEquals(errorMsg, getDummyOnDiskSize(referenceItem), nonRootIndex.getInt()); + assertEquals(getDummyFileOffset(referenceItem), nonRootIndex.getLong(), errorMsg); + assertEquals(getDummyOnDiskSize(referenceItem), nonRootIndex.getInt(), errorMsg); } } } - @Test + @TestTemplate public void testBlockIndexChunk() throws IOException { BlockIndexChunk c = new HFileBlockIndex.BlockIndexChunkImpl(); HFileIndexBlockEncoder indexBlockEncoder = NoOpIndexBlockEncoder.INSTANCE; @@ -593,7 +587,7 @@ public void testBlockIndexChunk() throws IOException { } /** Checks if the HeapSize calculator is within reason */ - @Test + @TestTemplate public void testHeapSizeForBlockIndex() throws IOException { Class cl = HFileBlockIndex.BlockIndexReader.class; long expected = ClassSize.estimateBase(cl, false); @@ -616,7 +610,7 @@ public void testHeapSizeForBlockIndex() throws IOException { /** * to check if looks good when midKey on a leaf index block boundary */ - @Test + @TestTemplate public void testMidKeyOnLeafIndexBlockBoundary() throws IOException { Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey"); int maxChunkSize = 512; @@ -672,7 +666,7 @@ public void testMidKeyOnLeafIndexBlockBoundary() throws IOException { * Testing block index through the HFile writer/reader APIs. Allows to test setting index block * size through configuration, intermediate-level index blocks, and caching index blocks on write. */ - @Test + @TestTemplate public void testHFileWriterAndReader() throws IOException { Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_block_index"); CacheConfig cacheConf = new CacheConfig(conf, BlockCacheFactory.createBlockCache(conf)); @@ -770,8 +764,8 @@ public void testHFileWriterAndReader() throws IOException { // If the first key of the block is not among the keys written, we // are not parsing the non-root index block format correctly. 
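
TestHFileBlock and TestHFileBlockIndex are converted with the same parameterization pattern: @RunWith(Parameterized.class) plus a @Parameters factory becomes the project's @HBaseParameterizedTestTemplate extension, a static parameters() method returning Stream<Arguments>, constructor injection of each argument set, and @TestTemplate in place of @Test. A minimal sketch, assuming the extension locates the static parameters() method by convention as the converted classes suggest (ExampleTest is hypothetical):

import java.util.stream.Stream;
import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.params.provider.Arguments;

@HBaseParameterizedTestTemplate(name = "{index}: flag={0}")
public class ExampleTest {

  private final boolean flag;

  public ExampleTest(boolean flag) { // one instance per Arguments entry
    this.flag = flag;
  }

  public static Stream<Arguments> parameters() {
    return Stream.of(Arguments.of(true), Arguments.of(false));
  }

  @TestTemplate
  public void testWithFlag() {
    // runs once for each entry produced by parameters()
  }
}
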
- assertTrue("Invalid block key from leaf-level block: " + blockKeyStr, - keyStrSet.contains(blockKeyStr)); + assertTrue(keyStrSet.contains(blockKeyStr), + "Invalid block key from leaf-level block: " + blockKeyStr); } } @@ -788,15 +782,14 @@ public void testHFileWriterAndReader() throws IOException { } private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i) throws IOException { - assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0, - scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i]))); + assertEquals(0, scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])), + "Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")"); } private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr, ByteBuffer buf) { - assertEquals( - msgPrefix + ": expected " + Bytes.toStringBinary(arr) + ", actual " - + Bytes.toStringBinary(buf), - 0, Bytes.compareTo(arr, 0, arr.length, buf.array(), buf.arrayOffset(), buf.limit())); + assertEquals(0, + Bytes.compareTo(arr, 0, arr.length, buf.array(), buf.arrayOffset(), buf.limit()), msgPrefix + + ": expected " + Bytes.toStringBinary(arr) + ", actual " + Bytes.toStringBinary(buf)); } /** Check a key/value pair after it was read by the reader */ @@ -808,12 +801,12 @@ private void checkKeyValue(String msgPrefix, byte[] expectedKey, byte[] expected assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue, valueRead); } - @Test + @TestTemplate public void testIntermediateLevelIndicesWithLargeKeys() throws IOException { testIntermediateLevelIndicesWithLargeKeys(16); } - @Test + @TestTemplate public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException { // because of the large rowKeys, we will end up with a 50-level block index without sanity check testIntermediateLevelIndicesWithLargeKeys(2); @@ -855,7 +848,7 @@ public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws * This test is for HBASE-27940, which midkey metadata in root index block would always be ignored * by {@link BlockIndexReader#readMultiLevelIndexRoot}. 
*/ - @Test + @TestTemplate public void testMidKeyReadSuccessfullyFromRootIndexBlock() throws IOException { conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, 128); Path hfilePath = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockUnpack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockUnpack.java index a8a32595ebd0..714ef8801623 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockUnpack.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockUnpack.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -43,19 +42,14 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ IOTests.class, MediumTests.class }) -public class TestHFileBlockUnpack { +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileBlockUnpack.class); +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) +public class TestHFileBlockUnpack { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -66,11 +60,9 @@ public class TestHFileBlockUnpack { ByteBuffAllocator allocator; - @Rule - public TestName name = new TestName(); private FileSystem fs; - @Before + @BeforeEach public void setUp() throws Exception { fs = HFileSystem.get(TEST_UTIL.getConfiguration()); Configuration conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration()); @@ -85,8 +77,8 @@ public void setUp() throws Exception { * https://issues.apache.org/jira/browse/HBASE-27053 */ @Test - public void itUnpacksIdenticallyEachTime() throws IOException { - Path path = new Path(TEST_UTIL.getDataTestDir(), name.getMethodName()); + public void itUnpacksIdenticallyEachTime(TestInfo testInfo) throws IOException { + Path path = new Path(TEST_UTIL.getDataTestDir(), testInfo.getTestMethod().get().getName()); int totalSize = createTestBlock(path); // Allocate a bunch of random buffers, so we can be sure that unpack will only have "dirty" @@ -142,28 +134,27 @@ private void assertBuffersEqual(ByteBuff bufferOne, ByteBuff bufferTwo, int expe * {@link HFileBlock#isSharedMem()} == true See https://issues.apache.org/jira/browse/HBASE-27170 */ @Test - public void itUsesSharedMemoryIfUnpackedBlockExceedsMinAllocationSize() throws IOException { - Path path = new Path(TEST_UTIL.getDataTestDir(), 
name.getMethodName()); + public void itUsesSharedMemoryIfUnpackedBlockExceedsMinAllocationSize(TestInfo testInfo) + throws IOException { + Path path = new Path(TEST_UTIL.getDataTestDir(), testInfo.getTestMethod().get().getName()); int totalSize = createTestBlock(path); HFileBlockWrapper blockFromHFile = readBlock(path, totalSize); - assertFalse("expected hfile block to NOT be unpacked", blockFromHFile.original.isUnpacked()); - assertFalse("expected hfile block to NOT use shared memory", - blockFromHFile.original.isSharedMem()); + assertFalse(blockFromHFile.original.isUnpacked(), "expected hfile block to NOT be unpacked"); + assertFalse(blockFromHFile.original.isSharedMem(), + "expected hfile block to NOT use shared memory"); - assertTrue( + assertTrue(blockFromHFile.original.getOnDiskSizeWithHeader() < MIN_ALLOCATION_SIZE, "expected generated block size " + blockFromHFile.original.getOnDiskSizeWithHeader() - + " to be less than " + MIN_ALLOCATION_SIZE, - blockFromHFile.original.getOnDiskSizeWithHeader() < MIN_ALLOCATION_SIZE); - assertTrue( + + " to be less than " + MIN_ALLOCATION_SIZE); + assertTrue(blockFromHFile.original.getUncompressedSizeWithoutHeader() > MIN_ALLOCATION_SIZE, "expected generated block uncompressed size " + blockFromHFile.original.getUncompressedSizeWithoutHeader() + " to be more than " - + MIN_ALLOCATION_SIZE, - blockFromHFile.original.getUncompressedSizeWithoutHeader() > MIN_ALLOCATION_SIZE); + + MIN_ALLOCATION_SIZE); - assertTrue("expected unpacked block to be unpacked", blockFromHFile.unpacked.isUnpacked()); - assertTrue("expected unpacked block to use shared memory", - blockFromHFile.unpacked.isSharedMem()); + assertTrue(blockFromHFile.unpacked.isUnpacked(), "expected unpacked block to be unpacked"); + assertTrue(blockFromHFile.unpacked.isSharedMem(), + "expected unpacked block to use shared memory"); } private final static class HFileBlockWrapper { @@ -207,9 +198,8 @@ private int createTestBlock(Path path) throws IOException { writeTestKeyValues(hbw, MIN_ALLOCATION_SIZE - 1); hbw.writeHeaderAndData(os); totalSize = hbw.getOnDiskSizeWithHeader(); - assertTrue( - "expected generated block size " + totalSize + " to be less than " + MIN_ALLOCATION_SIZE, - totalSize < MIN_ALLOCATION_SIZE); + assertTrue(totalSize < MIN_ALLOCATION_SIZE, + "expected generated block size " + totalSize + " to be less than " + MIN_ALLOCATION_SIZE); } return totalSize; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java index d62cd168ea17..e273823028ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java @@ -17,20 +17,20 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Collection; import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; 
import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; @@ -45,20 +45,18 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.RedundantKVGenerator; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -@RunWith(Parameterized.class) -@Category({ IOTests.class, MediumTests.class }) +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: dataBlockEncoding={0}, includesMemstoreTS={1}") public class TestHFileDataBlockEncoder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileDataBlockEncoder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileDataBlockEncoder.class); private final Configuration conf = HBaseConfiguration.create(); private final RedundantKVGenerator generator = new RedundantKVGenerator(); @@ -72,14 +70,14 @@ public class TestHFileDataBlockEncoder { public TestHFileDataBlockEncoder(HFileDataBlockEncoder blockEncoder, boolean includesMemstoreTS) { this.blockEncoder = blockEncoder; this.includesMemstoreTS = includesMemstoreTS; - System.err.println("Encoding: " + blockEncoder.getDataBlockEncoding() + ", includesMemstoreTS: " + LOG.info("Encoding: " + blockEncoder.getDataBlockEncoding() + ", includesMemstoreTS: " + includesMemstoreTS); } /** * Test putting and taking out blocks into cache with different encoding options. */ - @Test + @TestTemplate public void testEncodingWithCache() throws IOException { testEncodingWithCacheInternals(false); testEncodingWithCacheInternals(true); @@ -104,14 +102,14 @@ private void testEncodingWithCacheInternals(boolean useTag) throws IOException { assertEquals(block.getBufferReadOnly(), returnedBlock.getBufferReadOnly()); } else { if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) { - System.out.println(blockEncoder); + LOG.info("" + blockEncoder); } assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType()); } } /** Test for HBASE-5746. */ - @Test + @TestTemplate public void testHeaderSizeInCacheWithoutChecksum() throws Exception { testHeaderSizeInCacheWithoutChecksumInternals(false); testHeaderSizeInCacheWithoutChecksumInternals(true); @@ -139,7 +137,7 @@ private void testHeaderSizeInCacheWithoutChecksumInternals(boolean useTags) thro /** * Test encoding. */ - @Test + @TestTemplate public void testEncoding() throws IOException { testEncodingInternals(false); testEncodingInternals(true); @@ -149,7 +147,7 @@ public void testEncoding() throws IOException { * Test encoding with offheap keyvalue. 
This test just verifies if the encoders work with DBB and * does not use the getXXXArray() API */ - @Test + @TestTemplate public void testEncodingWithOffheapKeyValue() throws IOException { // usually we have just block without headers, but don't complicate that try { @@ -229,20 +227,18 @@ private void writeBlock(Configuration conf, List kvs, HFileContext fileCon } } - /** Returns All possible data block encoding configurations */ - @Parameters - public static Collection getAllConfigurations() { - List configurations = new ArrayList<>(); + public static Stream parameters() { + List configurations = new ArrayList<>(); for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) { for (boolean includesMemstoreTS : new boolean[] { false, true }) { HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ? NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(diskAlgo); - configurations.add(new Object[] { dbe, includesMemstoreTS }); + configurations.add(Arguments.of(dbe, includesMemstoreTS)); } } - return configurations; + return configurations.stream(); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index 5b1e1a133a77..a61fade1909f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -35,7 +35,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -51,27 +50,23 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.RedundantKVGenerator; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileEncryption { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileEncryption.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static FileSystem fs; private static Encryption.Context cryptoContext; - @BeforeClass + @BeforeAll public static void setUp() throws Exception { Configuration conf = 
TEST_UTIL.getConfiguration(); // Disable block cache in this test. @@ -240,11 +235,11 @@ public void testHFileEncryption() throws Exception { FixedFileTrailer trailer = reader.getTrailer(); assertNotNull(trailer.getEncryptionKey()); scanner = reader.getScanner(conf, false, false); - assertTrue("Initial seekTo failed", scanner.seekTo()); + assertTrue(scanner.seekTo(), "Initial seekTo failed"); do { Cell kv = scanner.getCell(); - assertTrue("Read back an unexpected or invalid KV", - testKvs.contains(KeyValueUtil.ensureKeyValue(kv))); + assertTrue(testKvs.contains(KeyValueUtil.ensureKeyValue(kv)), + "Read back an unexpected or invalid KV"); i++; } while (scanner.next()); } finally { @@ -252,7 +247,7 @@ public void testHFileEncryption() throws Exception { scanner.close(); } - assertEquals("Did not read back as many KVs as written", i, testKvs.size()); + assertEquals(i, testKvs.size(), "Did not read back as many KVs as written"); // Test random seeks with pread LOG.info("Random seeking with " + fileContext); @@ -260,10 +255,10 @@ public void testHFileEncryption() throws Exception { reader = HFile.createReader(fs, path, cacheConf, true, conf); try { scanner = reader.getScanner(conf, false, true); - assertTrue("Initial seekTo failed", scanner.seekTo()); + assertTrue(scanner.seekTo(), "Initial seekTo failed"); for (i = 0; i < 100; i++) { KeyValue kv = testKvs.get(rand.nextInt(testKvs.size())); - assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv)); + assertEquals(0, scanner.seekTo(kv), "Unable to find KV as expected: " + kv); } } finally { scanner.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java index edab56a7afce..726ea5fd4e7d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java @@ -23,14 +23,12 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test a case when an inline index chunk is converted to a root one. This reproduces the bug in @@ -43,13 +41,10 @@ * entry to the root-level block, and that would prevent us from upgrading the leaf-level chunk to * the root chunk, thus not triggering the bug. 
*/ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileInlineToRootChunkConversion { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileInlineToRootChunkConversion.class); - private final HBaseTestingUtility testUtil = new HBaseTestingUtility(); private final Configuration conf = testUtil.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePrettyPrinter.java index 9ec5dc1c576f..ee3d6d39c177 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePrettyPrinter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePrettyPrinter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.codahale.metrics.MetricRegistry; import java.io.ByteArrayOutputStream; @@ -27,28 +27,22 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFilePrettyPrinter { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFilePrettyPrinter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFilePrettyPrinter.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); @@ -61,7 +55,7 @@ public class TestHFilePrettyPrinter { private static PrintStream ps; private static ByteArrayOutputStream stream; - @Before + @BeforeEach public void setup() throws Exception { conf = UTIL.getConfiguration(); // Runs on local filesystem. Test does not need sync. Turn off checks. 
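
Several hunks in this patch also replace System.out and System.err diagnostics with the class's SLF4J logger. Where a bare value is logged, the parameterized form is the idiomatic choice, since it defers string construction until the level is known to be enabled. A short sketch (hypothetical class):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingExample {

  private static final Logger LOG = LoggerFactory.getLogger(LoggingExample.class);

  void report(Object encoder, int seekTo) {
    // Preferred over LOG.info("" + seekTo): no concatenation when INFO is off.
    LOG.info("Encoder: {}", encoder);
    LOG.info("seekTo={}", seekTo);
  }
}
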
@@ -71,7 +65,7 @@ public void setup() throws Exception { ps = new PrintStream(stream); } - @After + @AfterEach public void teardown() { original = System.out; System.setOut(original); @@ -81,8 +75,8 @@ public void teardown() { public void testHFilePrettyPrinterNonRootDir() throws Exception { Path fileNotInRootDir = UTIL.getDataTestDir("hfile"); TestHRegionServerBulkLoad.createHFile(fs, fileNotInRootDir, cf, fam, value, 1000); - assertNotEquals("directory used is not an HBase root dir", UTIL.getDefaultRootDirPath(), - fileNotInRootDir); + assertNotEquals(UTIL.getDefaultRootDirPath(), fileNotInRootDir, + "directory used is not an HBase root dir"); System.setOut(ps); new HFilePrettyPrinter(conf).run(new String[] { "-v", String.valueOf(fileNotInRootDir) }); @@ -97,7 +91,7 @@ public void testHFilePrettyPrinterRootDir() throws Exception { String rootString = rootPath + rootPath.SEPARATOR; Path fileInRootDir = new Path(rootString + "hfile"); TestHRegionServerBulkLoad.createHFile(fs, fileInRootDir, cf, fam, value, 1000); - assertTrue("directory used is a root dir", fileInRootDir.toString().startsWith(rootString)); + assertTrue(fileInRootDir.toString().startsWith(rootString), "directory used is a root dir"); System.setOut(ps); HFilePrettyPrinter printer = new HFilePrettyPrinter(); @@ -113,8 +107,8 @@ public void testHFilePrettyPrinterRootDir() throws Exception { public void testHFilePrettyPrinterSeekFirstRow() throws Exception { Path fileNotInRootDir = UTIL.getDataTestDir("hfile"); TestHRegionServerBulkLoad.createHFile(fs, fileNotInRootDir, cf, fam, value, 1000); - assertNotEquals("directory used is not an HBase root dir", UTIL.getDefaultRootDirPath(), - fileNotInRootDir); + assertNotEquals(UTIL.getDefaultRootDirPath(), fileNotInRootDir, + "directory used is not an HBase root dir"); HFile.Reader reader = HFile.createReader(fs, fileNotInRootDir, CacheConfig.DISABLED, true, conf); @@ -132,8 +126,8 @@ public void testHFilePrettyPrinterSeekFirstRow() throws Exception { public void testHistograms() throws Exception { Path fileNotInRootDir = UTIL.getDataTestDir("hfile"); TestHRegionServerBulkLoad.createHFile(fs, fileNotInRootDir, cf, fam, value, 1000); - assertNotEquals("directory used is not an HBase root dir", UTIL.getDefaultRootDirPath(), - fileNotInRootDir); + assertNotEquals(UTIL.getDefaultRootDirPath(), fileNotInRootDir, + "directory used is not an HBase root dir"); System.setOut(ps); new HFilePrettyPrinter(conf).run(new String[] { "-s", "-d", String.valueOf(fileNotInRootDir) }); @@ -158,8 +152,8 @@ public void testHistograms() throws Exception { private void assertContainsRanges(String result, int... 
rangeCountPairs) { for (int i = 0; i < rangeCountPairs.length - 1; i += 2) { String expected = rangeCountPairs[i + 1] + " <= " + rangeCountPairs[i]; - assertTrue("expected:\n" + result + "\nto contain: '" + expected + "'", - result.contains(expected)); + assertTrue(result.contains(expected), + "expected:\n" + result + "\nto contain: '" + expected + "'"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java index 1eb5ac02607f..146bfa6bc785 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; @@ -32,29 +32,24 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; /** * Test */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileReaderImpl { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileReaderImpl.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); static KeyValue toKV(String row) { @@ -103,21 +98,21 @@ public void testRecordBlockSize() throws IOException { scanner.seekTo(); scanner.recordBlockSize( - size -> assertTrue("expected non-zero block size on first request", size > 0)); + size -> assertTrue(size > 0, "expected non-zero block size on first request")); scanner.recordBlockSize( - size -> assertEquals("expected zero block size on second request", 0, (int) size)); + size -> assertEquals(0, (int) size, "expected zero block size on second request")); AtomicInteger blocks = new AtomicInteger(0); while (scanner.next()) { scanner.recordBlockSize(size -> { blocks.incrementAndGet(); // there's only 2 cells in the second block - assertTrue("expected remaining block to be less than block size", - size < toKV("a").getLength() * 3); + assertTrue(size < toKV("a").getLength() * 3, + "expected remaining block to be less than block size"); }); } - assertEquals("expected only one remaining block but got " + blocks.get(), 1, blocks.get()); + assertEquals(1, blocks.get(), "expected only one remaining block but got " + blocks.get()); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileScannerImplReferenceCount.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileScannerImplReferenceCount.java index 85a0ac33d558..d3a95a7b19b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileScannerImplReferenceCount.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileScannerImplReferenceCount.java @@ -25,17 +25,20 @@ import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MIN_ALLOCATE_SIZE_KEY; import static org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.MAX_CHUNK_SIZE_KEY; import static org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; import java.util.Random; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -49,42 +52,32 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@RunWith(Parameterized.class) -@Category({ IOTests.class, LargeTests.class }) +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: ioengine={0}") public class TestHFileScannerImplReferenceCount { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileScannerImplReferenceCount.class); + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of("file"), Arguments.of("offheap"), Arguments.of("mmap"), + Arguments.of("pmem")); + } - @Rule - public TestName CASE = new TestName(); + private String ioengine; - @Parameters(name = "{index}: ioengine={0}") - public static Collection<Object[]> data() { - return Arrays.asList(new Object[] { "file" }, new Object[] { "offheap" }, - new Object[] { "mmap" }, new Object[] { "pmem" }); + public TestHFileScannerImplReferenceCount(String ioengine) { + this.ioengine = ioengine; } - @Parameter - public
String ioengine; - private static final Logger LOG = LoggerFactory.getLogger(TestHFileScannerImplReferenceCount.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @@ -109,7 +102,7 @@ private static byte[] randLongBytes() { private Cell secondCell = null; private ByteBuffAllocator allocator; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() { Configuration conf = UTIL.getConfiguration(); // Set the max chunk size and min entries key to be very small for index block, so that we can @@ -125,9 +118,9 @@ public static void setUpBeforeClass() { conf.setInt(MIN_ALLOCATE_SIZE_KEY, 0); } - @Before - public void setUp() throws IOException { - String caseName = CASE.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"); + @BeforeEach + public void setUp(TestInfo testInfo) throws IOException { + String caseName = testInfo.getDisplayName().replaceAll("[^a-zA-Z0-9]", "_"); this.workDir = UTIL.getDataTestDir(caseName); if (!"offheap".equals(ioengine)) { ioengine = ioengine + ":" + workDir.toString() + "/cachedata"; @@ -142,17 +135,17 @@ public void setUp() throws IOException { LOG.info("Start to write {} cells into hfile: {}, case:{}", CELL_COUNT, hfilePath, caseName); } - @After + @AfterEach public void tearDown() throws IOException { this.allocator.clean(); this.fs.delete(this.workDir, true); } private void waitBucketCacheFlushed(BlockCache cache) throws InterruptedException { - Assert.assertTrue(cache instanceof CombinedBlockCache); + assertTrue(cache instanceof CombinedBlockCache); BlockCache[] blockCaches = cache.getBlockCaches(); - Assert.assertEquals(blockCaches.length, 2); - Assert.assertTrue(blockCaches[1] instanceof BucketCache); + assertEquals(blockCaches.length, 2); + assertTrue(blockCaches[1] instanceof BucketCache); TestBucketCache.waitUntilAllFlushedToBucket((BucketCache) blockCaches[1]); } @@ -189,30 +182,30 @@ private void testReleaseBlock(Algorithm compression, DataBlockEncoding encoding) HFileBlock curBlock, prevBlock; BlockCache defaultBC = BlockCacheFactory.createBlockCache(conf); CacheConfig cacheConfig = new CacheConfig(conf, null, defaultBC, allocator); - Assert.assertNotNull(defaultBC); - Assert.assertTrue(cacheConfig.isCombinedBlockCache()); + assertNotNull(defaultBC); + assertTrue(cacheConfig.isCombinedBlockCache()); HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConfig, true, conf); - Assert.assertTrue(reader instanceof HFileReaderImpl); + assertTrue(reader instanceof HFileReaderImpl); // We've build a HFile tree with index = 16. 
- Assert.assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); + assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); HFileScannerImpl scanner = (HFileScannerImpl) reader.getScanner(conf, true, true, false); HFileBlock block1 = reader.getDataBlockIndexReader() .loadDataBlockWithScanInfo(firstCell, null, true, true, false, DataBlockEncoding.NONE, reader) .getHFileBlock(); waitBucketCacheFlushed(defaultBC); - Assert.assertTrue(block1.getBlockType().isData()); - Assert.assertFalse(block1 instanceof ExclusiveMemHFileBlock); + assertTrue(block1.getBlockType().isData()); + assertFalse(block1 instanceof ExclusiveMemHFileBlock); HFileBlock block2 = reader.getDataBlockIndexReader().loadDataBlockWithScanInfo(secondCell, null, true, true, false, DataBlockEncoding.NONE, reader).getHFileBlock(); waitBucketCacheFlushed(defaultBC); - Assert.assertTrue(block2.getBlockType().isData()); - Assert.assertFalse(block2 instanceof ExclusiveMemHFileBlock); + assertTrue(block2.getBlockType().isData()); + assertFalse(block2 instanceof ExclusiveMemHFileBlock); // Only one refCnt for RPC path. - Assert.assertEquals(block1.refCnt(), 1); - Assert.assertEquals(block2.refCnt(), 1); - Assert.assertFalse(block1 == block2); + assertEquals(block1.refCnt(), 1); + assertEquals(block2.refCnt(), 1); + assertFalse(block1 == block2); scanner.seekTo(firstCell); curBlock = scanner.curBlock; @@ -221,7 +214,7 @@ private void testReleaseBlock(Algorithm compression, DataBlockEncoding encoding) // Seek to the block again, the curBlock won't change and won't read from BlockCache. so // refCnt should be unchanged. scanner.seekTo(firstCell); - Assert.assertTrue(curBlock == scanner.curBlock); + assertTrue(curBlock == scanner.curBlock); this.assertRefCnt(curBlock, 2); prevBlock = curBlock; @@ -246,16 +239,16 @@ private void testReleaseBlock(Algorithm compression, DataBlockEncoding encoding) this.assertRefCnt(curBlock, 1); // Finish the block & block2 RPC path - Assert.assertTrue(block1.release()); - Assert.assertTrue(block2.release()); + assertTrue(block1.release()); + assertTrue(block2.release()); // Evict the LRUBlockCache - Assert.assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 2); - Assert.assertEquals(prevBlock.refCnt(), 0); - Assert.assertEquals(curBlock.refCnt(), 0); + assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 2); + assertEquals(prevBlock.refCnt(), 0); + assertEquals(curBlock.refCnt(), 0); int count = 0; - Assert.assertTrue(scanner.seekTo()); + assertTrue(scanner.seekTo()); ++count; while (scanner.next()) { count++; @@ -266,57 +259,57 @@ private void testReleaseBlock(Algorithm compression, DataBlockEncoding encoding) /** * See HBASE-22480 */ - @Test + @TestTemplate public void testSeekBefore() throws Exception { HFileBlock curBlock, prevBlock; writeHFile(conf, fs, hfilePath, Algorithm.NONE, DataBlockEncoding.NONE, CELL_COUNT); BlockCache defaultBC = BlockCacheFactory.createBlockCache(conf); CacheConfig cacheConfig = new CacheConfig(conf, null, defaultBC, allocator); - Assert.assertNotNull(defaultBC); - Assert.assertTrue(cacheConfig.isCombinedBlockCache()); + assertNotNull(defaultBC); + assertTrue(cacheConfig.isCombinedBlockCache()); HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConfig, true, conf); - Assert.assertTrue(reader instanceof HFileReaderImpl); + assertTrue(reader instanceof HFileReaderImpl); // We've build a HFile tree with index = 16. 
- Assert.assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); + assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); HFileScannerImpl scanner = (HFileScannerImpl) reader.getScanner(conf, true, true, false); HFileBlock block1 = reader.getDataBlockIndexReader() .loadDataBlockWithScanInfo(firstCell, null, true, true, false, DataBlockEncoding.NONE, reader) .getHFileBlock(); - Assert.assertTrue(block1.getBlockType().isData()); - Assert.assertFalse(block1 instanceof ExclusiveMemHFileBlock); + assertTrue(block1.getBlockType().isData()); + assertFalse(block1 instanceof ExclusiveMemHFileBlock); HFileBlock block2 = reader.getDataBlockIndexReader().loadDataBlockWithScanInfo(secondCell, null, true, true, false, DataBlockEncoding.NONE, reader).getHFileBlock(); - Assert.assertTrue(block2.getBlockType().isData()); - Assert.assertFalse(block2 instanceof ExclusiveMemHFileBlock); + assertTrue(block2.getBlockType().isData()); + assertFalse(block2 instanceof ExclusiveMemHFileBlock); // Wait until flushed to IOEngine; waitBucketCacheFlushed(defaultBC); // One RPC reference path. - Assert.assertEquals(block1.refCnt(), 1); - Assert.assertEquals(block2.refCnt(), 1); + assertEquals(block1.refCnt(), 1); + assertEquals(block2.refCnt(), 1); // Let the curBlock refer to block2. scanner.seekTo(secondCell); curBlock = scanner.curBlock; - Assert.assertFalse(curBlock == block2); - Assert.assertEquals(1, block2.refCnt()); + assertFalse(curBlock == block2); + assertEquals(1, block2.refCnt()); this.assertRefCnt(curBlock, 2); prevBlock = scanner.curBlock; // Release the block1, no other reference. - Assert.assertTrue(block1.release()); - Assert.assertEquals(0, block1.refCnt()); + assertTrue(block1.release()); + assertEquals(0, block1.refCnt()); // Release the block2, no other reference. - Assert.assertTrue(block2.release()); - Assert.assertEquals(0, block2.refCnt()); + assertTrue(block2.release()); + assertEquals(0, block2.refCnt()); // Do the seekBefore: the newBlock will be the previous block of curBlock. - Assert.assertTrue(scanner.seekBefore(secondCell)); - Assert.assertEquals(scanner.prevBlocks.size(), 1); - Assert.assertTrue(scanner.prevBlocks.get(0) == prevBlock); + assertTrue(scanner.seekBefore(secondCell)); + assertEquals(scanner.prevBlocks.size(), 1); + assertTrue(scanner.prevBlocks.get(0) == prevBlock); curBlock = scanner.curBlock; // the curBlock is read from IOEngine, so a different block. - Assert.assertFalse(curBlock == block1); + assertFalse(curBlock == block1); // Two reference for curBlock: 1. scanner; 2. blockCache. this.assertRefCnt(curBlock, 2); // Reference count of prevBlock must be unchanged because we haven't shipped. @@ -324,20 +317,20 @@ public void testSeekBefore() throws Exception { // Do the shipped scanner.shipped(); - Assert.assertEquals(scanner.prevBlocks.size(), 0); - Assert.assertNotNull(scanner.curBlock); + assertEquals(scanner.prevBlocks.size(), 0); + assertNotNull(scanner.curBlock); this.assertRefCnt(curBlock, 2); this.assertRefCnt(prevBlock, 1); // Do the close scanner.close(); - Assert.assertNull(scanner.curBlock); + assertNull(scanner.curBlock); this.assertRefCnt(curBlock, 1); this.assertRefCnt(prevBlock, 1); - Assert.assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 2); - Assert.assertEquals(0, curBlock.refCnt()); - Assert.assertEquals(0, prevBlock.refCnt()); + assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 2); + assertEquals(0, curBlock.refCnt()); + assertEquals(0, prevBlock.refCnt()); // Reload the block1 again. 
block1 = reader.getDataBlockIndexReader() @@ -345,55 +338,55 @@ public void testSeekBefore() throws Exception { .getHFileBlock(); // Wait until flushed to IOEngine; waitBucketCacheFlushed(defaultBC); - Assert.assertTrue(block1.getBlockType().isData()); - Assert.assertFalse(block1 instanceof ExclusiveMemHFileBlock); - Assert.assertTrue(block1.release()); - Assert.assertEquals(0, block1.refCnt()); + assertTrue(block1.getBlockType().isData()); + assertFalse(block1 instanceof ExclusiveMemHFileBlock); + assertTrue(block1.release()); + assertEquals(0, block1.refCnt()); // Re-seek to the begin. - Assert.assertTrue(scanner.seekTo()); + assertTrue(scanner.seekTo()); curBlock = scanner.curBlock; - Assert.assertFalse(curBlock == block1); + assertFalse(curBlock == block1); this.assertRefCnt(curBlock, 2); // Return false because firstCell <= c[0] - Assert.assertFalse(scanner.seekBefore(firstCell)); + assertFalse(scanner.seekBefore(firstCell)); // The block1 shouldn't be released because we still don't do the shipped or close. this.assertRefCnt(curBlock, 2); scanner.close(); this.assertRefCnt(curBlock, 1); - Assert.assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 1); - Assert.assertEquals(0, curBlock.refCnt()); + assertTrue(defaultBC.evictBlocksByHfileName(hfilePath.getName()) >= 1); + assertEquals(0, curBlock.refCnt()); } private void assertRefCnt(HFileBlock block, int value) { if (ioengine.startsWith("offheap") || ioengine.startsWith("pmem")) { - Assert.assertEquals(value, block.refCnt()); + assertEquals(value, block.refCnt()); } else { - Assert.assertEquals(value - 1, block.refCnt()); + assertEquals(value - 1, block.refCnt()); } } - @Test + @TestTemplate public void testDefault() throws Exception { testReleaseBlock(Algorithm.NONE, DataBlockEncoding.NONE); } - @Test + @TestTemplate public void testCompression() throws Exception { testReleaseBlock(Algorithm.GZ, DataBlockEncoding.NONE); } - @Test + @TestTemplate public void testDataBlockEncoding() throws Exception { testReleaseBlock(Algorithm.NONE, DataBlockEncoding.ROW_INDEX_V1); } - @Test + @TestTemplate public void testDataBlockEncodingAndCompression() throws Exception { testReleaseBlock(Algorithm.GZ, DataBlockEncoding.ROW_INDEX_V1); } - @Test + @TestTemplate public void testWithLruBlockCache() throws Exception { HFileBlock curBlock; writeHFile(conf, fs, hfilePath, Algorithm.NONE, DataBlockEncoding.NONE, CELL_COUNT); @@ -401,70 +394,70 @@ public void testWithLruBlockCache() throws Exception { conf.set(BUCKET_CACHE_IOENGINE_KEY, ""); BlockCache defaultBC = BlockCacheFactory.createBlockCache(conf); CacheConfig cacheConfig = new CacheConfig(conf, null, defaultBC, allocator); - Assert.assertNotNull(defaultBC); - Assert.assertFalse(cacheConfig.isCombinedBlockCache()); // Must be LruBlockCache. + assertNotNull(defaultBC); + assertFalse(cacheConfig.isCombinedBlockCache()); // Must be LruBlockCache. HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConfig, true, conf); - Assert.assertTrue(reader instanceof HFileReaderImpl); + assertTrue(reader instanceof HFileReaderImpl); // We've build a HFile tree with index = 16. 
- Assert.assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); + assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); HFileScannerImpl scanner = (HFileScannerImpl) reader.getScanner(conf, true, true, false); HFileBlock block1 = reader.getDataBlockIndexReader() .loadDataBlockWithScanInfo(firstCell, null, true, true, false, DataBlockEncoding.NONE, reader) .getHFileBlock(); - Assert.assertTrue(block1.getBlockType().isData()); - Assert.assertTrue(block1 instanceof ExclusiveMemHFileBlock); + assertTrue(block1.getBlockType().isData()); + assertTrue(block1 instanceof ExclusiveMemHFileBlock); HFileBlock block2 = reader.getDataBlockIndexReader().loadDataBlockWithScanInfo(secondCell, null, true, true, false, DataBlockEncoding.NONE, reader).getHFileBlock(); - Assert.assertTrue(block2.getBlockType().isData()); - Assert.assertTrue(block2 instanceof ExclusiveMemHFileBlock); + assertTrue(block2.getBlockType().isData()); + assertTrue(block2 instanceof ExclusiveMemHFileBlock); // One RPC reference path. - Assert.assertEquals(block1.refCnt(), 0); - Assert.assertEquals(block2.refCnt(), 0); + assertEquals(block1.refCnt(), 0); + assertEquals(block2.refCnt(), 0); scanner.seekTo(firstCell); curBlock = scanner.curBlock; - Assert.assertTrue(curBlock == block1); - Assert.assertEquals(curBlock.refCnt(), 0); - Assert.assertTrue(scanner.prevBlocks.isEmpty()); + assertTrue(curBlock == block1); + assertEquals(curBlock.refCnt(), 0); + assertTrue(scanner.prevBlocks.isEmpty()); // Switch to next block scanner.seekTo(secondCell); curBlock = scanner.curBlock; - Assert.assertTrue(curBlock == block2); - Assert.assertEquals(curBlock.refCnt(), 0); - Assert.assertEquals(curBlock.retain().refCnt(), 0); + assertTrue(curBlock == block2); + assertEquals(curBlock.refCnt(), 0); + assertEquals(curBlock.retain().refCnt(), 0); // Only pooled HFileBlock will be kept in prevBlocks and ExclusiveMemHFileBlock will never keep // in prevBlocks. - Assert.assertTrue(scanner.prevBlocks.isEmpty()); + assertTrue(scanner.prevBlocks.isEmpty()); // close the scanner scanner.close(); - Assert.assertNull(scanner.curBlock); - Assert.assertTrue(scanner.prevBlocks.isEmpty()); + assertNull(scanner.curBlock); + assertTrue(scanner.prevBlocks.isEmpty()); } - @Test + @TestTemplate public void testDisabledBlockCache() throws Exception { writeHFile(conf, fs, hfilePath, Algorithm.NONE, DataBlockEncoding.NONE, CELL_COUNT); // Set LruBlockCache conf.setFloat(HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f); BlockCache defaultBC = BlockCacheFactory.createBlockCache(conf); - Assert.assertNull(defaultBC); + assertNull(defaultBC); CacheConfig cacheConfig = new CacheConfig(conf, null, defaultBC, allocator); - Assert.assertFalse(cacheConfig.isCombinedBlockCache()); // Must be LruBlockCache. + assertFalse(cacheConfig.isCombinedBlockCache()); // Must be LruBlockCache. HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConfig, true, conf); - Assert.assertTrue(reader instanceof HFileReaderImpl); + assertTrue(reader instanceof HFileReaderImpl); // We've build a HFile tree with index = 16. 
- Assert.assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); + assertEquals(16, reader.getTrailer().getNumDataIndexLevels()); HFileBlock block1 = reader.getDataBlockIndexReader() .loadDataBlockWithScanInfo(firstCell, null, true, true, false, DataBlockEncoding.NONE, reader) .getHFileBlock(); - Assert.assertTrue(block1.isSharedMem()); - Assert.assertTrue(block1 instanceof SharedMemHFileBlock); - Assert.assertEquals(1, block1.refCnt()); - Assert.assertTrue(block1.release()); + assertTrue(block1.isSharedMem()); + assertTrue(block1 instanceof SharedMemHFileBlock); + assertEquals(1, block1.refCnt()); + assertTrue(block1.release()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java index 3e5e20858ccb..8ffcdc46641e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java @@ -26,20 +26,19 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.RandomDistribution; import org.apache.hadoop.io.BytesWritable; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,15 +58,12 @@ * Remove after tfile is committed and use the tfile version of this class instead. *
*/ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestHFileSeek { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileSeek.class); - - private static final byte[] CF = "f1".getBytes(); - private static final byte[] QUAL = "q1".getBytes(); + private static final byte[] CF = Bytes.toBytes("f1"); + private static final byte[] QUAL = Bytes.toBytes("q1"); private static final boolean USE_PREAD = true; private MyOptions options; private Configuration conf; @@ -80,7 +76,7 @@ public class TestHFileSeek { private static final Logger LOG = LoggerFactory.getLogger(TestHFileSeek.class); - @Before + @BeforeEach public void setUp() throws IOException { if (options == null) { options = new MyOptions(new String[0]); @@ -107,7 +103,7 @@ public void setUp() throws IOException { kvGen = new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen, options.dictSize); } - @After + @AfterEach public void tearDown() { try { fs.close(); @@ -162,10 +158,10 @@ private void createTFile() throws IOException { double duration = (double) timer.read() / 1000; // in us. long fsize = fs.getFileStatus(path).getLen(); - System.out.printf("time: %s...uncompressed: %.2fMB...raw thrpt: %.2fMB/s\n", timer.toString(), - (double) totalBytes / 1024 / 1024, totalBytes / duration); - System.out.printf("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n", timer.toString(), - (double) fsize / 1024 / 1024, fsize / duration); + LOG.info(String.format("time: %s...uncompressed: %.2fMB...raw thrpt: %.2fMB/s\n", + timer.toString(), (double) totalBytes / 1024 / 1024, totalBytes / duration)); + LOG.info(String.format("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n", + timer.toString(), (double) fsize / 1024 / 1024, fsize / duration)); } public void seekTFile() throws IOException { @@ -194,9 +190,9 @@ public void seekTFile() throws IOException { } } timer.stop(); - System.out.printf("time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n", + LOG.info(String.format("time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n", timer.toString(), NanoTimer.nanoTimeToString(timer.read() / options.seekCount), - options.seekCount - miss, miss, (double) totalBytes / 1024 / (options.seekCount - miss)); + options.seekCount - miss, miss, (double) totalBytes / 1024 / (options.seekCount - miss))); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index 40220f07bb72..08b073e6798a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Random; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import 
org.apache.hadoop.fs.FileSystem; @@ -35,8 +35,8 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -53,27 +53,20 @@ import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableUtils; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Testing writing a version 3 {@link HFile}. */ -@RunWith(Parameterized.class) -@Category({ IOTests.class, SmallTests.class }) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: useTags={0}") public class TestHFileWriterV3 { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileWriterV3.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileWriterV3.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Random RNG = new Random(9713312); // Just a fixed seed. @@ -86,18 +79,17 @@ public TestHFileWriterV3(boolean useTags) { this.useTags = useTags; } - @Parameters - public static Collection<Object[]> parameters() { - return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED; + public static Stream<Arguments> parameters() { + return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED.stream().map(arr -> Arguments.of(arr)); } - @Before + @BeforeEach public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); fs = FileSystem.get(conf); } - @Test + @TestTemplate public void testHFileFormatV3() throws IOException { testHFileFormatV3Internals(useTags); } @@ -109,7 +101,7 @@ private void testHFileFormatV3Internals(boolean useTags) throws IOException { writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags); } - @Test + @TestTemplate public void testMidKeyInHFile() throws IOException { testMidKeyInHFileInternals(useTags); } @@ -197,7 +189,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Algorithm compressAlgo, i hfile.initMetaAndIndex(reader); if (findMidKey) { Cell midkey = dataBlockIndexReader.midkey(reader); - assertNotNull("Midkey should not be null", midkey); + assertNotNull(midkey, "Midkey should not be null"); } // Meta index.
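[Reviewer note: the parameterized conversions in this patch all follow one shape; a minimal sketch of it is below. It assumes, as the hunks above suggest, that HBaseParameterizedTestTemplate runs one @TestTemplate invocation per Arguments tuple returned by the static parameters() method and injects each tuple through the constructor. The class name and tag literal here are illustrative only, not part of the patch.]

// Sketch only: the post-migration shape of a parameterized test in this patch.
// ExampleParameterizedTest and the "small" tag value are hypothetical.
import java.util.stream.Stream;
import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.params.provider.Arguments;

@Tag("small")
@HBaseParameterizedTestTemplate(name = "{index}: useTags={0}")
public class ExampleParameterizedTest {

  // Replaces @Parameterized.Parameters: one Arguments tuple per former Object[].
  public static Stream<Arguments> parameters() {
    return Stream.of(Arguments.of(false), Arguments.of(true));
  }

  private final boolean useTags;

  // Replaces @Parameter field injection: each tuple arrives via the constructor.
  public ExampleParameterizedTest(boolean useTags) {
    this.useTags = useTags;
  }

  // Replaces @Test under @RunWith(Parameterized.class): runs once per tuple.
  @TestTemplate
  public void testSomething() {
    // exercise the code under test with this.useTags
  }
}

[Non-parameterized lifecycle moves the same way throughout: @Before/@After become @BeforeEach/@AfterEach, @BeforeClass becomes @BeforeAll, and method names formerly read from a TestName rule come from an injected TestInfo.]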
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java index 09c259479258..92e2814dca19 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Random; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -31,7 +33,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparatorImpl; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -48,27 +50,20 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.io.Text; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Testing writing a version 3 {@link HFile} for all encoded blocks */ -@RunWith(Parameterized.class) -@Category({ IOTests.class, MediumTests.class }) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: useTags={0}, dataBlockEncoding={1}") public class TestHFileWriterV3WithDataEncoders { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileWriterV3WithDataEncoders.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileWriterV3WithDataEncoders.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -84,28 +79,26 @@ public TestHFileWriterV3WithDataEncoders(boolean useTags, DataBlockEncoding data this.dataBlockEncoding = dataBlockEncoding; } - @Parameterized.Parameters - public static Collection<Object[]> parameters() { + public static Stream<Arguments> parameters() { DataBlockEncoding[] dataBlockEncodings = DataBlockEncoding.values(); - Object[][] params = new Object[dataBlockEncodings.length * 2 - 2][]; - int i = 0; + Stream.Builder<Arguments> builder = Stream.builder(); for (DataBlockEncoding dataBlockEncoding : dataBlockEncodings) { if (dataBlockEncoding == DataBlockEncoding.NONE) { continue; } - params[i++] = new Object[] { false, dataBlockEncoding }; - params[i++] = new Object[] { true, dataBlockEncoding }; + builder.add(Arguments.of(false, dataBlockEncoding)); + builder.add(Arguments.of(true, dataBlockEncoding)); } - return Arrays.asList(params); + return builder.build(); } - @Before + @BeforeEach public
void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); fs = FileSystem.get(conf); } - @Test + @TestTemplate public void testHFileFormatV3() throws IOException { testHFileFormatV3Internals(useTags); } @@ -117,7 +110,7 @@ private void testHFileFormatV3Internals(boolean useTags) throws IOException { writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags); } - @Test + @TestTemplate public void testMidKeyInHFile() throws IOException { testMidKeyInHFileInternals(useTags); } @@ -147,8 +140,8 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com long fileSize = fs.getFileStatus(hfilePath).getLen(); FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize); - Assert.assertEquals(3, trailer.getMajorVersion()); - Assert.assertEquals(entryCount, trailer.getEntryCount()); + assertEquals(3, trailer.getMajorVersion()); + assertEquals(entryCount, trailer.getEntryCount()); HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo) .withIncludesMvcc(true).withIncludesTags(useTags).withDataBlockEncoding(dataBlockEncoding) .withHBaseCheckSum(true).build(); @@ -180,7 +173,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com hfile.initMetaAndIndex(reader); if (findMidKey) { Cell midkey = dataBlockIndexReader.midkey(reader); - Assert.assertNotNull("Midkey should not be null", midkey); + assertNotNull(midkey, "Midkey should not be null"); } // Meta index. @@ -213,7 +206,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com trailer.getLoadOnOpenDataOffset()); HFileBlock block = blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader); - Assert.assertEquals(BlockType.META, block.getBlockType()); + assertEquals(BlockType.META, block.getBlockType()); Text t = new Text(); ByteBuff buf = block.getBufferWithoutHeader(); if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) { @@ -223,7 +216,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com Text expectedText = (metaCounter == 0 ? new Text("Paris") : metaCounter == 1 ? 
new Text("Moscow") : new Text("Washington, D.C.")); - Assert.assertEquals(expectedText, t); + assertEquals(expectedText, t); LOG.info("Read meta block data: " + t); ++metaCounter; curBlockPos += block.getOnDiskSizeWithHeader(); @@ -244,7 +237,7 @@ private long scanBlocks(int entryCount, HFileContext context, List key HFileBlockDecodingContext ctx = blockReader.getBlockDecodingContext(); HFileBlock block = blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader); - Assert.assertEquals(BlockType.ENCODED_DATA, block.getBlockType()); + assertEquals(BlockType.ENCODED_DATA, block.getBlockType()); ByteBuff origBlock = block.getBufferReadOnly(); int pos = block.headerSize() + DataBlockEncoding.ID_SIZE; origBlock.position(pos); @@ -255,19 +248,19 @@ private long scanBlocks(int entryCount, HFileContext context, List key seeker.setCurrentBuffer(buf); Cell res = seeker.getCell(); KeyValue kv = keyValues.get(entriesRead); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv)); + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv)); ++entriesRead; while (seeker.next()) { res = seeker.getCell(); kv = keyValues.get(entriesRead); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv)); + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv)); ++entriesRead; } ++blocksRead; curBlockPos += block.getOnDiskSizeWithHeader(); } LOG.info("Finished reading: entries={}, blocksRead = {}", entriesRead, blocksRead); - Assert.assertEquals(entryCount, entriesRead); + assertEquals(entryCount, entriesRead); return curBlockPos; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java index f212eeb6bb51..4696372ee2ee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java @@ -17,21 +17,21 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Random; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -40,13 +40,11 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; 
+import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,33 +54,33 @@ * A kind of integration test at the intersection of {@link HFileBlock}, {@link CacheConfig}, and * {@link LruBlockCache}. */ -@Category({ IOTests.class, SmallTests.class }) -@RunWith(Parameterized.class) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: cacheOnWrite={0}") public class TestLazyDataBlockDecompression { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLazyDataBlockDecompression.class); private static final Logger LOG = LoggerFactory.getLogger(TestLazyDataBlockDecompression.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Random RNG = new Random(9713312); // Just a fixed seed. private FileSystem fs; - @Parameterized.Parameter(0) - public boolean cacheOnWrite; + private final boolean cacheOnWrite; - @Parameterized.Parameters - public static Iterable<Object[]> data() { - return Arrays.asList(new Object[][] { { false }, { true } }); + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of(false), Arguments.of(true)); } - @Before + public TestLazyDataBlockDecompression(boolean cacheOnWrite) { + this.cacheOnWrite = cacheOnWrite; + } + + @BeforeEach public void setUp() throws IOException { fs = FileSystem.get(TEST_UTIL.getConfiguration()); } - @After + @AfterEach public void tearDown() { fs = null; } @@ -134,7 +132,7 @@ private static void cacheBlocks(Configuration conf, CacheConfig cacheConfig, Fil reader.close(); } - @Test + @TestTemplate public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception { // enough room for 2 uncompressed block int maxSize = (int) (HConstants.DEFAULT_BLOCKSIZE * 2.1); @@ -156,11 +154,11 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception { assertFalse(cc.isCombinedBlockCache()); LruBlockCache disabledBlockCache = (LruBlockCache) cc.getBlockCache().get(); LOG.info("disabledBlockCache=" + disabledBlockCache); - assertEquals("test inconsistency detected.", maxSize, disabledBlockCache.getMaxSize()); - assertTrue("eviction thread spawned unintentionally.", - disabledBlockCache.getEvictionThread() == null); - assertEquals("freshly created blockcache contains blocks.", 0, - disabledBlockCache.getBlockCount()); + assertEquals(maxSize, disabledBlockCache.getMaxSize(), "test inconsistency detected."); + assertTrue(disabledBlockCache.getEvictionThread() == null, + "eviction thread spawned unintentionally."); + assertEquals(0, disabledBlockCache.getBlockCount(), + "freshly created blockcache contains blocks."); // 2000 kv's is ~3.6 full unencoded data blocks. // Requires a conf and CacheConfig but should not be specific to this instance's cache settings @@ -169,13 +167,13 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception { // populate the cache cacheBlocks(lazyCompressDisabled, cc, fs, hfilePath, context); long disabledBlockCount = disabledBlockCache.getBlockCount(); - assertTrue("blockcache should contain blocks. disabledBlockCount=" + disabledBlockCount, - disabledBlockCount > 0); + assertTrue(disabledBlockCount > 0, + "blockcache should contain blocks. 
disabledBlockCount=" + disabledBlockCount); long disabledEvictedCount = disabledBlockCache.getStats().getEvictedCount(); for (Map.Entry e : disabledBlockCache.getMapForTests() .entrySet()) { HFileBlock block = (HFileBlock) e.getValue().getBuffer(); - assertTrue("found a packed block, block=" + block, block.isUnpacked()); + assertTrue(block.isUnpacked(), "found a packed block, block=" + block); } // count blocks with lazy decompression @@ -186,20 +184,20 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception { lazyCompressEnabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true); cc = new CacheConfig(lazyCompressEnabled, new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressEnabled)); - assertTrue("test improperly configured.", cc.shouldCacheDataCompressed()); + assertTrue(cc.shouldCacheDataCompressed(), "test improperly configured."); assertTrue(cc.getBlockCache().get() instanceof LruBlockCache); LruBlockCache enabledBlockCache = (LruBlockCache) cc.getBlockCache().get(); LOG.info("enabledBlockCache=" + enabledBlockCache); - assertEquals("test inconsistency detected", maxSize, enabledBlockCache.getMaxSize()); - assertTrue("eviction thread spawned unintentionally.", - enabledBlockCache.getEvictionThread() == null); - assertEquals("freshly created blockcache contains blocks.", 0, - enabledBlockCache.getBlockCount()); + assertEquals(maxSize, enabledBlockCache.getMaxSize(), "test inconsistency detected"); + assertTrue(enabledBlockCache.getEvictionThread() == null, + "eviction thread spawned unintentionally."); + assertEquals(0, enabledBlockCache.getBlockCount(), + "freshly created blockcache contains blocks."); cacheBlocks(lazyCompressEnabled, cc, fs, hfilePath, context); long enabledBlockCount = enabledBlockCache.getBlockCount(); - assertTrue("blockcache should contain blocks. enabledBlockCount=" + enabledBlockCount, - enabledBlockCount > 0); + assertTrue(enabledBlockCount > 0, + "blockcache should contain blocks. enabledBlockCount=" + enabledBlockCount); long enabledEvictedCount = enabledBlockCache.getStats().getEvictedCount(); int candidatesFound = 0; for (Map.Entry e : enabledBlockCache.getMapForTests() @@ -207,24 +205,24 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception { candidatesFound++; HFileBlock block = (HFileBlock) e.getValue().getBuffer(); if (cc.shouldCacheCompressed(block.getBlockType().getCategory())) { - assertFalse("found an unpacked block, block=" + block + ", block buffer capacity=" - + block.getBufferWithoutHeader().capacity(), block.isUnpacked()); + assertFalse(block.isUnpacked(), "found an unpacked block, block=" + block + + ", block buffer capacity=" + block.getBufferWithoutHeader().capacity()); } } - assertTrue("did not find any candidates for compressed caching. Invalid test.", - candidatesFound > 0); + assertTrue(candidatesFound > 0, + "did not find any candidates for compressed caching. Invalid test."); LOG.info( "disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount); - assertTrue( + assertTrue(disabledBlockCount < enabledBlockCount, "enabling compressed data blocks should increase the effective cache size. 
" - + "disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount, - disabledBlockCount < enabledBlockCount); + + "disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount); LOG.info("disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount=" + enabledEvictedCount); - assertTrue("enabling compressed data blocks should reduce the number of evictions. " - + "disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount=" - + enabledEvictedCount, enabledEvictedCount < disabledEvictedCount); + assertTrue(enabledEvictedCount < disabledEvictedCount, + "enabling compressed data blocks should reduce the number of evictions. " + + "disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount=" + + enabledEvictedCount); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java index 00f43fcced94..cb40a77965ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java @@ -18,10 +18,11 @@ package org.apache.hadoop.hbase.io.hfile; import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import java.util.Random; @@ -32,7 +33,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Waiter; @@ -43,10 +43,8 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.ClassSize; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,13 +54,10 @@ * Tests will ensure it grows and shrinks in size properly, evictions run when they're supposed to * and do what they should, and that cached blocks are accessible when expected to be. 
*/ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestLruAdaptiveBlockCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLruAdaptiveBlockCache.class); - private static final Logger LOG = LoggerFactory.getLogger(TestLruAdaptiveBlockCache.class); private static final Configuration CONF = HBaseConfiguration.create(); @@ -73,7 +68,7 @@ public void testCacheEvictionThreadSafe() throws Exception { int numBlocks = 9; int testRuns = 10; final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); - assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); + assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken."); final LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize); EvictionThread evictionThread = cache.getEvictionThread(); @@ -121,7 +116,7 @@ public void testBackgroundEvictionThread() throws Exception { long maxSize = 100000; int numBlocks = 9; long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); - assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); + assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken."); LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize); EvictionThread evictionThread = cache.getEvictionThread(); @@ -162,7 +157,7 @@ public String explainFailure() throws Exception { for (long prevCnt = 0 /* < number of blocks added */, curCnt = cache.getBlockCount(); prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) { Thread.sleep(200); - assertTrue("Cache never stabilized.", n++ < 100); + assertTrue(n++ < 100, "Cache never stabilized."); } long evictionCount = cache.getStats().getEvictionCount(); @@ -207,8 +202,8 @@ public void testCacheSimple() throws Exception { for (CachedItem block : blocks) { cache.cacheBlock(block.cacheKey, block); } - assertEquals("Cache should ignore cache requests for blocks already in cache", - expectedBlockCount, cache.getBlockCount()); + assertEquals(expectedBlockCount, cache.getBlockCount(), + "Cache should ignore cache requests for blocks already in cache"); // Verify correctly calculated cache heap size assertEquals(expectedCacheSize, cache.heapSize()); @@ -978,9 +973,9 @@ static void testMultiThreadGetAndEvictBlockInternal(BlockCache cache) throws Exc t1.join(); t2.join(); t3.join(); - Assert.assertFalse(err1.get()); - Assert.assertFalse(err2.get()); - Assert.assertFalse(err3.get()); + assertFalse(err1.get()); + assertFalse(err2.get()); + assertFalse(err3.get()); } @Test @@ -1003,7 +998,7 @@ public void testSkipCacheDataBlocksInteral(int heavyEvictionCountLimit) throws E long maxSize = 100000000; int numBlocks = 100000; final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); - assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); + assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken."); final LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize, true, (int) Math.ceil(1.2 * maxSize / blockSize), LruAdaptiveBlockCache.DEFAULT_LOAD_FACTOR, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java index 59cacc154a5e..e2248cb9e6fa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java @@ -18,10 +18,11 @@ package org.apache.hadoop.hbase.io.hfile; import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import java.util.Random; @@ -32,7 +33,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Waiter; @@ -43,10 +43,8 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.ClassSize; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,13 +54,10 @@ * Tests will ensure it grows and shrinks in size properly, evictions run when they're supposed to * and do what they should, and that cached blocks are accessible when expected to be. */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestLruBlockCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLruBlockCache.class); - private static final Logger LOG = LoggerFactory.getLogger(TestLruBlockCache.class); private static final Configuration CONF = HBaseConfiguration.create(); @@ -73,7 +68,7 @@ public void testCacheEvictionThreadSafe() throws Exception { int numBlocks = 9; int testRuns = 10; final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); - assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); + assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken."); final LruBlockCache cache = new LruBlockCache(maxSize, blockSize); EvictionThread evictionThread = cache.getEvictionThread(); @@ -124,7 +119,7 @@ public void testBackgroundEvictionThread() throws Exception { long maxSize = 100000; int numBlocks = 9; long blockSize = calculateBlockSizeDefault(maxSize, numBlocks); - assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); + assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken."); LruBlockCache cache = new LruBlockCache(maxSize, blockSize); EvictionThread evictionThread = cache.getEvictionThread(); @@ -165,7 +160,7 @@ public String explainFailure() throws Exception { for (long prevCnt = 0 /* < number of blocks added */, curCnt = cache.getBlockCount(); prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) { Thread.sleep(200); - assertTrue("Cache never stabilized.", n++ < 100); + assertTrue(n++ < 100, "Cache never stabilized."); } long evictionCount = cache.getStats().getEvictionCount(); @@ -210,8 +205,8 @@ 
public void testCacheSimple() throws Exception { for (CachedItem block : blocks) { cache.cacheBlock(block.cacheKey, block); } - assertEquals("Cache should ignore cache requests for blocks already in cache", - expectedBlockCount, cache.getBlockCount()); + assertEquals(expectedBlockCount, cache.getBlockCount(), + "Cache should ignore cache requests for blocks already in cache"); // Verify correctly calculated cache heap size assertEquals(expectedCacheSize, cache.heapSize()); @@ -980,9 +975,9 @@ static void testMultiThreadGetAndEvictBlockInternal(BlockCache cache) throws Exc t1.join(); t2.join(); t3.join(); - Assert.assertFalse(err1.get()); - Assert.assertFalse(err2.get()); - Assert.assertFalse(err3.get()); + assertFalse(err1.get()); + assertFalse(err2.get()); + assertFalse(err3.get()); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java index eb57b0acd652..ff7e495142e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java @@ -17,30 +17,25 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestLruCachedBlock { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLruCachedBlock.class); - LruCachedBlock block; LruCachedBlock blockEqual; LruCachedBlock blockNotEqual; - @Before + @BeforeEach public void setUp() throws Exception { BlockCacheKey cacheKey = new BlockCacheKey("name", 0); BlockCacheKey otherKey = new BlockCacheKey("name2", 1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java index 8facb1f4b5be..91a4b3369a88 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java @@ -29,12 +29,12 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule; +import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; import 
io.opentelemetry.sdk.trace.data.SpanData; import java.io.IOException; import java.util.List; @@ -48,7 +48,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; @@ -81,21 +80,18 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.Pair; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestPrefetch { - private static final Logger LOG = LoggerFactory.getLogger(TestPrefetch.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefetch.class); + private static final Logger LOG = LoggerFactory.getLogger(TestPrefetch.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -107,10 +103,10 @@ public class TestPrefetch { private FileSystem fs; private BlockCache blockCache; - @Rule - public OpenTelemetryRule otelRule = OpenTelemetryRule.create(); + @RegisterExtension + private static OpenTelemetryExtension OTEL_EXT = OpenTelemetryExtension.create(); - @Before + @BeforeEach public void setUp() throws IOException, InterruptedException { conf = TEST_UTIL.getConfiguration(); conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true); @@ -170,9 +166,9 @@ public void testPrefetch() throws Exception { readStoreFile(storeFile); }, "testPrefetch"); - TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<>(otelRule::getSpans, + TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<>(OTEL_EXT::getSpans, hasItems(hasName("testPrefetch"), hasName("PrefetchExecutor.request")))); - final List spans = otelRule.getSpans(); + final List spans = OTEL_EXT.getSpans(); if (LOG.isDebugEnabled()) { StringTraceRenderer renderer = new StringTraceRenderer(spans); renderer.render(LOG::debug); @@ -329,14 +325,13 @@ public void testPrefetchWithDelay() throws Exception { // Wait for 20 seconds, no thread should start prefetch Thread.sleep(20000); - assertFalse("Prefetch threads should not be running at this point", reader.prefetchStarted()); - long timeout = 10000; + assertFalse(reader.prefetchStarted(), "Prefetch threads should not be running at this point"); Waiter.waitFor(conf, 10000, () -> (reader.prefetchStarted() || reader.prefetchComplete())); assertTrue(reader.prefetchStarted() || reader.prefetchComplete()); - assertTrue("Prefetch should start post configured delay", - getElapsedTime(startTime) > PrefetchExecutor.getPrefetchDelay()); + assertTrue(getElapsedTime(startTime) > PrefetchExecutor.getPrefetchDelay(), + "Prefetch should start post configured delay"); conf.setInt(PREFETCH_DELAY, 1000); conf.setFloat(PREFETCH_DELAY_VARIATION, PREFETCH_DELAY_VARIATION_DEFAULT_VALUE); @@ -514,7 +509,7 @@ private Pair writeStoreFileForSplit(Path storeDir, HFileContext co } } sfw.close(); - return new Pair(sfw.getPath(), splitPoint); + return 
new Pair<>(sfw.getPath(), splitPoint); } public static KeyValue.Type generateKeyType(Random rand) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java index 7ca5e34e6db5..1e5da4ba8d04 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java @@ -18,14 +18,13 @@ package org.apache.hadoop.hbase.io.hfile; import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.StartMiniClusterOption; @@ -42,21 +41,17 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, LargeTests.class }) +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) public class TestPrefetchRSClose { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefetchRSClose.class); - private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchRSClose.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -67,7 +62,7 @@ public class TestPrefetchRSClose { MiniHBaseCluster cluster; StartMiniClusterOption option = StartMiniClusterOption.builder().numRegionServers(1).build(); - @Before + @BeforeEach public void setup() throws Exception { conf = TEST_UTIL.getConfiguration(); testDir = TEST_UTIL.getDataTestDir(); @@ -130,7 +125,7 @@ public void testPrefetchPersistence() throws Exception { assertTrue(new File(testDir + "/bucket.persistence").exists()); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); TEST_UTIL.cleanupDataTestDirOnTestFS(String.valueOf(testDir)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java index 688802c28e25..3845e4649257 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java @@ -20,11 +20,11 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY; import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY; import static org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.BUCKET_CACHE_BUCKETS_KEY; -import static org.junit.Assert.assertEquals; -import static 
org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -37,7 +37,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; @@ -64,30 +63,22 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestPrefetchWithBucketCache { private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchWithBucketCache.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefetchWithBucketCache.class); - - @Rule - public TestName name = new TestName(); - private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2; @@ -97,20 +88,20 @@ public class TestPrefetchWithBucketCache { private FileSystem fs; private BlockCache blockCache; - @Before - public void setUp() throws IOException { + @BeforeEach + public void setUp(TestInfo testInfo) throws IOException { conf = TEST_UTIL.getConfiguration(); conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true); fs = HFileSystem.get(conf); - File testDir = new File(name.getMethodName()); + File testDir = new File(testInfo.getTestMethod().get().getName()); testDir.mkdir(); conf.set(BUCKET_CACHE_IOENGINE_KEY, "file:/" + testDir.getAbsolutePath() + "/bucket.cache"); } - @After - public void tearDown() { - File cacheFile = new File(name.getMethodName() + "/bucket.cache"); - File dir = new File(name.getMethodName()); + @AfterEach + public void tearDown(TestInfo testInfo) { + File cacheFile = new File(testInfo.getTestMethod().get().getName() + "/bucket.cache"); + File dir = new File(testInfo.getTestMethod().get().getName()); cacheFile.delete(); dir.delete(); } @@ -283,15 +274,15 @@ public void testPrefetchMetricProgress() throws Exception { } @Test - public void testPrefetchMetricProgressForLinks() throws Exception { + public void testPrefetchMetricProgressForLinks(TestInfo testInfo) throws Exception { conf.setLong(BUCKET_CACHE_SIZE_KEY, 200); blockCache = BlockCacheFactory.createBlockCache(conf); cacheConf = new CacheConfig(conf, blockCache); - final RegionInfo hri = - 
RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + final RegionInfo hri = RegionInfoBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/ Configuration testConf = new Configuration(this.conf); - Path testDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + Path testDir = TEST_UTIL.getDataTestDir(testInfo.getTestMethod().get().getName()); CommonFSUtils.setRootDir(testConf, testDir); Path tableDir = CommonFSUtils.getTableDir(testDir, hri.getTable()); RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf(tableDir.getName())).build(); @@ -308,8 +299,8 @@ public void testPrefetchMetricProgressForLinks() throws Exception { Waiter.waitFor(testConf, 300, () -> bc.getBackingMap().size() == 6); long cachedSize = bc.getRegionCachedInfo().get().get(region.getEncodedName()); - final RegionInfo dstHri = - RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + final RegionInfo dstHri = RegionInfoBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); HRegionFileSystem dstRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, CommonFSUtils.getTableDir(testDir, dstHri.getTable()), dstHri); @@ -340,18 +331,18 @@ public void testPrefetchMetricProgressForLinks() throws Exception { } @Test - public void testPrefetchMetricProgressForLinksToArchived() throws Exception { + public void testPrefetchMetricProgressForLinksToArchived(TestInfo testInfo) throws Exception { conf.setLong(BUCKET_CACHE_SIZE_KEY, 200); blockCache = BlockCacheFactory.createBlockCache(conf); cacheConf = new CacheConfig(conf, blockCache); // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/ Configuration testConf = new Configuration(this.conf); - Path testDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + Path testDir = TEST_UTIL.getDataTestDir(testInfo.getTestMethod().get().getName()); CommonFSUtils.setRootDir(testConf, testDir); - final RegionInfo hri = - RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + final RegionInfo hri = RegionInfoBuilder + .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build(); Path tableDir = CommonFSUtils.getTableDir(testDir, hri.getTable()); RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf(tableDir.getName())).build(); Path regionDir = new Path(tableDir, region.getEncodedName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java index 3089fe36901b..ac6cd15b282f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -25,7 +25,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -33,20 +32,15 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Test; /** * Test {@link HFileScanner#reseekTo(org.apache.hadoop.hbase.Cell)} */ -@Category({ IOTests.class, SmallTests.class }) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(SmallTests.TAG) public class TestReseekTo { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReseekTo.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Test @@ -128,7 +122,7 @@ private void testReseekToInternals(TagUsage tagUsage) throws IOException { long start = System.nanoTime(); scanner.reseekTo(new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"), Bytes.toBytes(value))); - assertEquals("i is " + i, value, scanner.getValueString()); + assertEquals(value, scanner.getValueString(), "i is " + i); } reader.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java index 8653c4d416d5..5cd6f635aaf5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.jupiter.api.Assertions.assertEquals; + import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -26,23 +28,18 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparatorImpl; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestRowIndexV1DataEncoder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRowIndexV1DataEncoder.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -50,7 +47,7 @@ public class TestRowIndexV1DataEncoder { private FileSystem fs; private DataBlockEncoding dataBlockEncoding; - @Before + @BeforeEach public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); fs = FileSystem.get(conf); @@ -89,7 +86,7 @@ private void writeDataToHFile(Path hfilePath, int entryCount) throws IOException // Without the patch it would have produced 244 blocks (each block of 1236 bytes) // Earlier this would create blocks ~20% greater than the block size of 1024 bytes // After this patch actual block size is ~2% greater than the block size of 1024 bytes - Assert.assertEquals(278, trailer.getDataIndexCount()); + assertEquals(278, trailer.getDataIndexCount()); } private void writeKeyValues(int 
entryCount, HFile.Writer writer, List keyValues) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java index 2004e20aad6c..44ff11cd7c81 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.io.hfile; import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MIN_ALLOCATE_SIZE_KEY; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; @@ -31,7 +31,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.SizeCachedNoTagsByteBufferKeyValue; @@ -41,16 +40,14 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestRowIndexV1RoundTrip { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRowIndexV1RoundTrip.class); + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final DataBlockEncoding DATA_BLOCK_ENCODING = DataBlockEncoding.ROW_INDEX_V1; private static final int ENTRY_COUNT = 100; @@ -58,7 +55,7 @@ public class TestRowIndexV1RoundTrip { private Configuration conf; private FileSystem fs; - @Before + @BeforeEach public void setUp() throws IOException { conf = TEST_UTIL.getConfiguration(); conf.setLong(MIN_ALLOCATE_SIZE_KEY, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java index a5389c75a8c0..33a93e7085fd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -47,25 +46,18 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestScannerFromBucketCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestScannerFromBucketCache.class); - private static final Logger LOG = LoggerFactory.getLogger(TestScannerFromBucketCache.class); - @Rule - public TestName name = new TestName(); HRegion region = null; private HBaseTestingUtility test_util; @@ -76,8 +68,8 @@ public class TestScannerFromBucketCache { // Test names private TableName tableName; - private void setUp(boolean useBucketCache) throws IOException { - test_util = HBaseTestingUtility.createLocalHTU(); + private void setUp(boolean useBucketCache, TestInfo testInfo) throws IOException { + test_util = new HBaseTestingUtility(); conf = test_util.getConfiguration(); if (useBucketCache) { conf.setInt("hbase.bucketcache.size", 400); @@ -86,23 +78,23 @@ private void setUp(boolean useBucketCache) throws IOException { conf.setFloat("hfile.block.cache.size", 0.2f); conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f); } - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); } - @After + @AfterEach public void tearDown() throws Exception { EnvironmentEdgeManagerTestHelper.reset(); LOG.info("Cleaning test directory: " + test_util.getDataTestDir()); test_util.cleanupTestDir(); } - String getName() { - return name.getMethodName(); + String getName(TestInfo testInfo) { + return testInfo.getTestMethod().get().getName(); } @Test - public void testBasicScanWithLRUCache() throws IOException { - setUp(false); + public void testBasicScanWithLRUCache(TestInfo testInfo) throws IOException { + setUp(false, testInfo); byte[] row1 = Bytes.toBytes("row1"); byte[] qf1 = Bytes.toBytes("qualifier1"); byte[] qf2 = Bytes.toBytes("qualifier2"); @@ -113,7 +105,7 @@ public void testBasicScanWithLRUCache() throws IOException { long ts3 = ts1 + 2; // Setting up region - String method = this.getName(); + String method = this.getName(testInfo); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); @@ -139,8 +131,8 @@ public void testBasicScanWithLRUCache() throws IOException { } @Test - public void testBasicScanWithOffheapBucketCache() throws IOException { - setUp(true); + public void testBasicScanWithOffheapBucketCache(TestInfo testInfo) throws IOException { + setUp(true, testInfo); byte[] row1 = Bytes.toBytes("row1offheap"); byte[] qf1 = Bytes.toBytes("qualifier1"); byte[] qf2 = Bytes.toBytes("qualifier2"); @@ -151,7 +143,7 @@ public void testBasicScanWithOffheapBucketCache() throws IOException { 
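The TestScannerFromBucketCache hunks above replace the JUnit 4 TestName rule with Jupiter's TestInfo, injected as a parameter into test and lifecycle methods. One subtlety: getTestMethod() returns an Optional<Method>, so the bare get() calls used throughout this patch assume a method-level context. A minimal sketch with an explicit fallback instead of the unconditional get():

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.TestInfo;

    class TestInfoSketch {
      @Test
      void usesMethodName(TestInfo testInfo) {
        // Optional<Method> is present for method-level contexts; fall back to
        // the display name rather than calling get() unconditionally
        String name = testInfo.getTestMethod()
          .map(java.lang.reflect.Method::getName)
          .orElse(testInfo.getDisplayName());
        // ... derive table names, test directories, etc. from `name`
      }
    }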
long ts3 = ts1 + 2; // Setting up region - String method = this.getName(); + String method = this.getName(testInfo); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); @@ -180,8 +172,8 @@ public void testBasicScanWithOffheapBucketCache() throws IOException { } @Test - public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException { - setUp(true); + public void testBasicScanWithOffheapBucketCacheWithMBB(TestInfo testInfo) throws IOException { + setUp(true, testInfo); byte[] row1 = Bytes.toBytes("row1offheap"); byte[] qf1 = Bytes.toBytes("qualifier1"); byte[] qf2 = Bytes.toBytes("qualifier2"); @@ -192,7 +184,7 @@ public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException { long ts3 = ts1 + 2; // Setting up region - String method = this.getName(); + String method = this.getName(testInfo); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java index fa3ecf7a8761..f9b0b2224cb3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -42,26 +42,19 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; /** * Test the optimization that does not scan files where all key ranges are excluded. 
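Returning to the OpenTelemetry change in TestPrefetch further above: Jupiter refuses to register @RegisterExtension fields that are private, so the private modifier on OTEL_EXT would fail at extension discovery time; the field needs package-private or wider visibility. A static field also registers the extension once for the whole class, while an instance field (the closer analogue of the old per-test OpenTelemetryRule) is registered per test instance. A minimal sketch of the supported shape:

    import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.RegisterExtension;

    class OtelRegistrationSketch {
      // must not be private; static scopes the extension to the class
      @RegisterExtension
      static final OpenTelemetryExtension OTEL = OpenTelemetryExtension.create();

      @Test
      void spansAreRecorded() {
        // returns the spans captured by the extension's in-memory exporter
        OTEL.getSpans();
      }
    }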
*/ -@RunWith(Parameterized.class) -@Category({ IOTests.class, SmallTests.class }) +@HBaseParameterizedTestTemplate(name = "{0}") +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(SmallTests.TAG) public class TestScannerSelectionUsingKeyRange { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestScannerSelectionUsingKeyRange.class); - - private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static TableName TABLE = TableName.valueOf("myTable"); private static String FAMILY = "myCF"; private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY); @@ -78,13 +71,12 @@ public class TestScannerSelectionUsingKeyRange { private BloomType bloomType; private int expectedCount; - @Parameters - public static Collection parameters() { - List params = new ArrayList<>(); + public static Stream parameters() { + List params = new ArrayList<>(); for (Object type : TYPE_COUNT.keySet()) { - params.add(new Object[] { type, TYPE_COUNT.get(type) }); + params.add(Arguments.of(type, TYPE_COUNT.get(type))); } - return params; + return params.stream(); } public TestScannerSelectionUsingKeyRange(Object type, Object count) { @@ -92,12 +84,12 @@ public TestScannerSelectionUsingKeyRange(Object type, Object count) { expectedCount = (Integer) count; } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.cleanupTestDir(); } - @Test + @TestTemplate public void testScannerSelection() throws IOException { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.hstore.compactionThreshold", 10000); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java index 6a9667bd8912..612910e21b78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java @@ -17,16 +17,16 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -43,26 +43,20 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test the optimization that does not scan files where all timestamps are expired. 
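Both scanner-selection tests swap @RunWith(Parameterized.class) for a class-level template: @HBaseParameterizedTestTemplate looks to be a helper introduced by this migration that feeds a static Stream<Arguments> provider into the constructor and runs each @TestTemplate method once per argument set. Stock Jupiter covers the same ground at method level; a sketch of the standard @ParameterizedTest form for comparison:

    import static org.junit.jupiter.api.Assertions.assertTrue;

    import java.util.stream.Stream;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.Arguments;
    import org.junit.jupiter.params.provider.MethodSource;

    class ParameterizedSketch {
      static Stream<Arguments> parameters() {
        return Stream.of(Arguments.of(1), Arguments.of(2), Arguments.of(3));
      }

      @ParameterizedTest(name = "numFreshFiles={0}")
      @MethodSource("parameters")
      void scannerSelection(int numFreshFiles) {
        assertTrue(numFreshFiles >= 1);
      }
    }

The class-level template keeps the JUnit 4 shape, with constructor injection shared by every test method, which is presumably why the patch does not inline @ParameterizedTest here.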
*/ -@RunWith(Parameterized.class) -@Category({ IOTests.class, LargeTests.class }) +@HBaseParameterizedTestTemplate(name = "numFreshFiles={0}") +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) public class TestScannerSelectionUsingTTL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestScannerSelectionUsingTTL.class); - private static final Logger LOG = LoggerFactory.getLogger(TestScannerSelectionUsingTTL.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); @@ -79,13 +73,12 @@ public class TestScannerSelectionUsingTTL { public final int numFreshFiles, totalNumFiles; - @Parameters - public static Collection parameters() { - List params = new ArrayList<>(); + public static Stream parameters() { + List params = new ArrayList<>(); for (int numFreshFiles = 1; numFreshFiles <= 3; ++numFreshFiles) { - params.add(new Object[] { numFreshFiles }); + params.add(Arguments.of(numFreshFiles)); } - return params; + return params.stream(); } public TestScannerSelectionUsingTTL(int numFreshFiles) { @@ -93,7 +86,7 @@ public TestScannerSelectionUsingTTL(int numFreshFiles) { this.totalNumFiles = numFreshFiles + NUM_EXPIRED_FILES; } - @Test + @TestTemplate public void testScannerSelection() throws IOException { Configuration conf = TEST_UTIL.getConfiguration(); conf.setBoolean("hbase.store.delete.expired.storefile", false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java index 6fe90105f816..13153f6d0534 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Random; @@ -27,7 +27,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -39,19 +38,15 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.BloomFilterUtil; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestSeekBeforeWithInlineBlocks { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSeekBeforeWithInlineBlocks.class); - private static final Logger LOG = LoggerFactory.getLogger(TestSeekBeforeWithInlineBlocks.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -164,19 +159,19 @@ public void testMultiIndexLevelRandomHFileWithBlooms() throws IOException { } private void checkSeekBefore(Cell[] cells, HFileScanner scanner, int i) throws 
IOException { - assertEquals( - "Failed to seek to the key before #" + i + " (" + CellUtil.getCellKeyAsString(cells[i]) + ")", - true, scanner.seekBefore(cells[i])); + assertEquals(true, scanner.seekBefore(cells[i]), "Failed to seek to the key before #" + i + " (" + + CellUtil.getCellKeyAsString(cells[i]) + ")"); } private void checkNoSeekBefore(Cell[] cells, HFileScanner scanner, int i) throws IOException { - assertEquals("Incorrectly succeeded in seeking to before first key (" - + CellUtil.getCellKeyAsString(cells[i]) + ")", false, scanner.seekBefore(cells[i])); + assertEquals(false, scanner.seekBefore(cells[i]), + "Incorrectly succeeded in seeking to before first key (" + + CellUtil.getCellKeyAsString(cells[i]) + ")"); } /** Check a key/value pair after it was read by the reader */ private void checkCell(Cell expected, Cell actual) { - assertTrue(String.format("Expected key %s, but was %s", CellUtil.getCellKeyAsString(expected), - CellUtil.getCellKeyAsString(actual)), CellUtil.equals(expected, actual)); + assertTrue(CellUtil.equals(expected, actual), String.format("Expected key %s, but was %s", + CellUtil.getCellKeyAsString(expected), CellUtil.getCellKeyAsString(actual))); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index 8ddbd95679e5..9eddea90781b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -17,15 +17,15 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Iterator; import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -43,34 +43,31 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test {@link HFileScanner#seekTo(Cell)} and its variants. 
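Every file in this patch converts @Category({ IOTests.class, SmallTests.class }) into repeated @Tag annotations. Jupiter tags are plain strings rather than marker classes, which is presumably why the testclassification interfaces now expose TAG constants; filtering then happens by tag name at execution time, for example through Surefire's groups and excludedGroups settings on the JUnit Platform. A minimal sketch, assuming SmallTests.TAG resolves to a string such as "SmallTests":

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    // @Tag is repeatable, replacing the array form of @Category
    @Tag("IOTests")
    @Tag("SmallTests")
    class TagSketch {
      @Test
      void runsOnlyWhenTheseTagsAreIncluded() {
      }
    }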
*/ -@Category({ IOTests.class, SmallTests.class }) -@RunWith(Parameterized.class) +@org.junit.jupiter.api.Tag(IOTests.TAG) +@org.junit.jupiter.api.Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: encoding={0}") public class TestSeekTo { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSeekTo.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSeekTo.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final DataBlockEncoding encoding; - @Parameters - public static Collection parameters() { - List paramList = new ArrayList<>(); + public static Stream parameters() { + List paramList = new ArrayList<>(); for (DataBlockEncoding encoding : DataBlockEncoding.values()) { - paramList.add(new Object[] { encoding }); + paramList.add(Arguments.of(encoding)); } - return paramList; + return paramList.stream(); } static boolean switchKVs = false; @@ -79,7 +76,7 @@ public TestSeekTo(DataBlockEncoding encoding) { this.encoding = encoding; } - @Before + @BeforeEach public void setUp() { // reset switchKVs = false; @@ -137,7 +134,7 @@ Path makeNewFile(TagUsage tagUsage) throws IOException { return ncTFile; } - @Test + @TestTemplate public void testSeekBefore() throws Exception { testSeekBeforeInternals(TagUsage.NO_TAG); testSeekBeforeInternals(TagUsage.ONLY_TAG); @@ -195,7 +192,7 @@ protected void deleteTestDir(FileSystem fs) throws IOException { } } - @Test + @TestTemplate public void testSeekBeforeWithReSeekTo() throws Exception { testSeekBeforeWithReSeekToInternals(TagUsage.NO_TAG); testSeekBeforeWithReSeekToInternals(TagUsage.ONLY_TAG); @@ -288,7 +285,7 @@ protected void testSeekBeforeWithReSeekToInternals(TagUsage tagUsage) throws IOE deleteTestDir(fs); } - @Test + @TestTemplate public void testSeekTo() throws Exception { testSeekToInternals(TagUsage.NO_TAG); testSeekToInternals(TagUsage.ONLY_TAG); @@ -320,7 +317,7 @@ protected void testSeekToInternals(TagUsage tagUsage) throws IOException { deleteTestDir(fs); } - @Test + @TestTemplate public void testBlockContainingKey() throws Exception { testBlockContainingKeyInternals(TagUsage.NO_TAG); testBlockContainingKeyInternals(TagUsage.ONLY_TAG); @@ -333,7 +330,7 @@ protected void testBlockContainingKeyInternals(TagUsage tagUsage) throws IOExcep Configuration conf = TEST_UTIL.getConfiguration(); HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf); HFileBlockIndex.BlockIndexReader blockIndexReader = reader.getDataBlockIndexReader(); - System.out.println(blockIndexReader.toString()); + LOG.info(blockIndexReader.toString()); // falls before the start of the file. 
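The TestSeekTo hunk above also swaps a stray System.out.println for the class's slf4j logger. Since the argument is an eager toString(), the placeholder form is the more idiomatic slf4j call where rendering is costly; a small sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

      void render(Object blockIndexReader) {
        // the {} placeholder defers String conversion until slf4j has
        // confirmed that INFO is enabled, unlike passing toString() directly
        LOG.info("block index: {}", blockIndexReader);
      }
    }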
assertEquals(-1, blockIndexReader.rootBlockContainingKey(toKV("a", tagUsage))); assertEquals(0, blockIndexReader.rootBlockContainingKey(toKV("c", tagUsage))); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java index 3c6b4647ef51..81a45b55bcd8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java @@ -17,33 +17,28 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.ClassSize; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests the concurrent TinyLfuBlockCache. */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestTinyLfuBlockCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestTinyLfuBlockCache.class); - @Test public void testCacheSimple() throws Exception { @@ -82,8 +77,8 @@ public void testCacheSimple() throws Exception { for (CachedItem block : blocks) { cache.cacheBlock(block.cacheKey, block); } - assertEquals("Cache should ignore cache requests for blocks already in cache", - expectedBlockCount, cache.getBlockCount()); + assertEquals(expectedBlockCount, cache.getBlockCount(), + "Cache should ignore cache requests for blocks already in cache"); // Verify correctly calculated cache heap size assertEquals(expectedCacheSize, cache.heapSize()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java index 61959e85ee87..b41185ac50d7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java @@ -30,12 +30,13 @@ import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.MULTI_FACTOR_CONFIG_NAME; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.QUEUE_ADDITION_WAIT_TIME; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.SINGLE_FACTOR_CONFIG_NAME; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -53,10 +54,11 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Waiter; @@ -84,14 +86,11 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Threads; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -103,31 +102,29 @@ *
<p>
* Tests will ensure that blocks' data correctness under several threads concurrency */ -@RunWith(Parameterized.class) -@Category({ IOTests.class, LargeTests.class }) +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}") public class TestBucketCache { private static final Logger LOG = LoggerFactory.getLogger(TestBucketCache.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBucketCache.class); - - @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}") - public static Iterable data() { - return Arrays.asList(new Object[][] { { 8192, null }, // TODO: why is 8k the default blocksize - // for these tests? - { 16 * 1024, + public static Stream parameters() { + // TODO: why is 8k the default blocksize for these tests? + return Stream.of(Arguments.of(8192, null), + Arguments.of(16 * 1024, new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024, 28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024, - 128 * 1024 + 1024 } } }); + 128 * 1024 + 1024 })); } - @Parameterized.Parameter(0) - public int constructedBlockSize; + private final int constructedBlockSize; + private final int[] constructedBlockSizes; - @Parameterized.Parameter(1) - public int[] constructedBlockSizes; + public TestBucketCache(int constructedBlockSize, int[] constructedBlockSizes) { + this.constructedBlockSize = constructedBlockSize; + this.constructedBlockSizes = constructedBlockSizes; + } BucketCache cache; final int CACHE_SIZE = 1000000; @@ -162,13 +159,13 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) { } } - @Before + @BeforeEach public void setup() throws IOException { cache = new MockedBucketCache(ioEngineName, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, null); } - @After + @AfterEach public void tearDown() { cache.shutdown(); } @@ -191,7 +188,7 @@ private static T randFrom(List a) { return a.get(ThreadLocalRandom.current().nextInt(a.size())); } - @Test + @TestTemplate public void testBucketAllocator() throws BucketAllocatorException { BucketAllocator mAllocator = cache.getAllocator(); /* @@ -218,7 +215,7 @@ public void testBucketAllocator() throws BucketAllocatorException { for (Integer blockSize : BLOCKSIZES) { BucketSizeInfo bucketSizeInfo = mAllocator.roundUpToBucketSizeInfo(blockSize); IndexStatistics indexStatistics = bucketSizeInfo.statistics(); - assertEquals("unexpected freeCount for " + bucketSizeInfo, 0, indexStatistics.freeCount()); + assertEquals(0, indexStatistics.freeCount(), "unexpected freeCount for " + bucketSizeInfo); // we know the block sizes above are multiples of 1024, but default bucket sizes give an // additional 1024 on top of that so this counts towards fragmentation in our test @@ -236,17 +233,17 @@ public void testBucketAllocator() throws BucketAllocatorException { assertEquals(0, mAllocator.getUsedSize()); } - @Test + @TestTemplate public void testCacheSimple() throws Exception { CacheTestUtils.testCacheSimple(cache, BLOCK_SIZE, NUM_QUERIES); } - @Test + @TestTemplate public void testCacheMultiThreadedSingleKey() throws Exception { CacheTestUtils.hammerSingleKey(cache, 2 * NUM_THREADS, 2 * NUM_QUERIES); } - @Test + @TestTemplate public void testHeapSizeChanges() throws Exception { cache.stopWriterThreads(); CacheTestUtils.testHeapSizeChanges(cache, BLOCK_SIZE); @@ -273,7 +270,7 @@ private void cacheAndWaitUntilFlushedToBucket(BucketCache cache, 
BlockCacheKey c waitUntilFlushedToBucket(cache, cacheKey); } - @Test + @TestTemplate public void testMemoryLeak() throws Exception { final BlockCacheKey cacheKey = new BlockCacheKey("dummy", 1L); cacheAndWaitUntilFlushedToBucket(cache, cacheKey, @@ -315,10 +312,10 @@ public void run() { */ assertEquals(1L, cache.getBlockCount()); assertTrue(cache.getCurrentSize() > 0L); - assertTrue("We should have a block!", cache.iterator().hasNext()); + assertTrue(cache.iterator().hasNext(), "We should have a block!"); } - @Test + @TestTemplate public void testRetrieveFromFile() throws Exception { Path testDir = createAndGetTestDir(); String ioEngineName = "file:" + testDir + "/bucket.cache"; @@ -339,14 +336,14 @@ public void testRetrieveFromFile() throws Exception { } } - @Test + @TestTemplate public void testRetrieveFromMMap() throws Exception { final Path testDir = createAndGetTestDir(); final String ioEngineName = "mmap:" + testDir + "/bucket.cache"; testRetrievalUtils(testDir, ioEngineName); } - @Test + @TestTemplate public void testRetrieveFromPMem() throws Exception { final Path testDir = createAndGetTestDir(); final String ioEngineName = "pmem:" + testDir + "/bucket.cache"; @@ -403,20 +400,20 @@ private void testRetrievalUtils(Path testDir, String ioEngineName) assertTrue(new File(persistencePath).exists()); } - @Test + @TestTemplate public void testRetrieveUnsupportedIOE() throws Exception { try { final Path testDir = createAndGetTestDir(); final String ioEngineName = testDir + "/bucket.cache"; testRetrievalUtils(testDir, ioEngineName); - Assert.fail("Should have thrown IllegalArgumentException because of unsupported IOEngine!!"); + fail("Should have thrown IllegalArgumentException because of unsupported IOEngine!!"); } catch (IllegalArgumentException e) { - Assert.assertEquals("Don't understand io engine name for cache- prefix with file:, " + assertEquals("Don't understand io engine name for cache- prefix with file:, " + "files:, mmap: or offheap", e.getMessage()); } } - @Test + @TestTemplate public void testRetrieveFromMultipleFiles() throws Exception { final Path testDirInitial = createAndGetTestDir(); final Path newTestDir = new HBaseTestingUtility().getDataTestDir(); @@ -441,7 +438,7 @@ public void testRetrieveFromMultipleFiles() throws Exception { } } - @Test + @TestTemplate public void testRetrieveFromFileWithoutPersistence() throws Exception { BucketCache bucketCache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, null); @@ -472,7 +469,7 @@ public void testRetrieveFromFileWithoutPersistence() throws Exception { } } - @Test + @TestTemplate public void testBucketAllocatorLargeBuckets() throws BucketAllocatorException { long availableSpace = 20 * 1024L * 1024 * 1024; int[] bucketSizes = new int[] { 1024, 1024 * 1024, 1024 * 1024 * 1024 }; @@ -480,7 +477,7 @@ public void testBucketAllocatorLargeBuckets() throws BucketAllocatorException { assertTrue(allocator.getBuckets().length > 0); } - @Test + @TestTemplate public void testGetPartitionSize() throws IOException { // Test default values validateGetPartitionSize(cache, DEFAULT_SINGLE_FACTOR, DEFAULT_MIN_FACTOR); @@ -500,7 +497,7 @@ public void testGetPartitionSize() throws IOException { validateGetPartitionSize(cache, 0.2f, 0.5f); } - @Test + @TestTemplate public void testCacheSizeCapacity() throws IOException { // Test cache capacity (capacity / blockSize) < Integer.MAX_VALUE validateGetPartitionSize(cache, DEFAULT_SINGLE_FACTOR, DEFAULT_MIN_FACTOR); @@ -512,13 +509,13 @@ 
public void testCacheSizeCapacity() throws IOException { try { new BucketCache(ioEngineName, Long.MAX_VALUE, 1, constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); - Assert.fail("Should have thrown IllegalArgumentException because of large cache capacity!"); + fail("Should have thrown IllegalArgumentException because of large cache capacity!"); } catch (IllegalArgumentException e) { - Assert.assertEquals("Cache capacity is too large, only support 32TB now", e.getMessage()); + assertEquals("Cache capacity is too large, only support 32TB now", e.getMessage()); } } - @Test + @TestTemplate public void testValidBucketCacheConfigs() throws IOException { Configuration conf = HBaseConfiguration.create(); conf.setFloat(ACCEPT_FACTOR_CONFIG_NAME, 0.9f); @@ -532,20 +529,20 @@ public void testValidBucketCacheConfigs() throws IOException { constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); assertTrue(cache.waitForCacheInitialization(10000)); - assertEquals(ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", 0.9f, - cache.getAcceptableFactor(), 0); - assertEquals(MIN_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f, cache.getMinFactor(), 0); - assertEquals(EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f, - cache.getExtraFreeFactor(), 0); - assertEquals(SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.1f, cache.getSingleFactor(), - 0); - assertEquals(MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", 0.7f, cache.getMultiFactor(), - 0); - assertEquals(MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", 0.2f, cache.getMemoryFactor(), - 0); + assertEquals(0.9f, cache.getAcceptableFactor(), 0, + ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate."); + assertEquals(0.5f, cache.getMinFactor(), 0, MIN_FACTOR_CONFIG_NAME + " failed to propagate."); + assertEquals(0.5f, cache.getExtraFreeFactor(), 0, + EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate."); + assertEquals(0.1f, cache.getSingleFactor(), 0, + SINGLE_FACTOR_CONFIG_NAME + " failed to propagate."); + assertEquals(0.7f, cache.getMultiFactor(), 0, + MULTI_FACTOR_CONFIG_NAME + " failed to propagate."); + assertEquals(0.2f, cache.getMemoryFactor(), 0, + MEMORY_FACTOR_CONFIG_NAME + " failed to propagate."); } - @Test + @TestTemplate public void testInvalidAcceptFactorConfig() throws IOException { float[] configValues = { -1f, 0.2f, 0.86f, 1.05f }; boolean[] expectedOutcomes = { false, false, true, false }; @@ -554,7 +551,7 @@ public void testInvalidAcceptFactorConfig() throws IOException { checkConfigValues(conf, configMappings, expectedOutcomes); } - @Test + @TestTemplate public void testInvalidMinFactorConfig() throws IOException { float[] configValues = { -1f, 0f, 0.96f, 1.05f }; // throws due to <0, in expected range, minFactor > acceptableFactor, > 1.0 @@ -564,7 +561,7 @@ public void testInvalidMinFactorConfig() throws IOException { checkConfigValues(conf, configMappings, expectedOutcomes); } - @Test + @TestTemplate public void testInvalidExtraFreeFactorConfig() throws IOException { float[] configValues = { -1f, 0f, 0.2f, 1.05f }; // throws due to <0, in expected range, in expected range, config can be > 1.0 @@ -575,7 +572,7 @@ public void testInvalidExtraFreeFactorConfig() throws IOException { checkConfigValues(conf, configMappings, expectedOutcomes); } - @Test + @TestTemplate public void testInvalidCacheSplitFactorConfig() throws IOException { float[] singleFactorConfigValues = { 0.2f, 0f, -0.2f, 1f }; float[] multiFactorConfigValues = { 0.4f, 0f, 1f, .05f }; @@ -602,11 +599,11 @@ private void 
checkConfigValues(Configuration conf, Map configMa BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, null, 100, conf); assertTrue(cache.waitForCacheInitialization(10000)); - assertTrue("Created BucketCache and expected it to succeed: " + expectSuccess[i] - + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]); + assertTrue(expectSuccess[i], "Created BucketCache and expected it to succeed: " + + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i]); } catch (IllegalArgumentException e) { - assertFalse("Created BucketCache and expected it to succeed: " + expectSuccess[i] - + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]); + assertFalse(expectSuccess[i], "Created BucketCache and expected it to succeed: " + + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i]); } } } @@ -618,7 +615,7 @@ private void validateGetPartitionSize(BucketCache bucketCache, float partitionFa assertEquals(expectedOutput, bucketCache.getPartitionSize(partitionFactor)); } - @Test + @TestTemplate public void testOffsetProducesPositiveOutput() { // This number is picked because it produces negative output if the values isn't ensured to be // positive. See HBASE-18757 for more information. @@ -629,7 +626,7 @@ public void testOffsetProducesPositiveOutput() { assertEquals(testValue, bucketEntry.offset()); } - @Test + @TestTemplate public void testEvictionCount() throws InterruptedException { int size = 100; int length = HConstants.HFILEBLOCK_HEADER_SIZE + size; @@ -680,7 +677,7 @@ public void testEvictionCount() throws InterruptedException { assertEquals(1, cache.getStats().getEvictionCount()); } - @Test + @TestTemplate public void testCacheBlockNextBlockMetadataMissing() throws Exception { int size = 100; int length = HConstants.HFILEBLOCK_HEADER_SIZE + size; @@ -738,7 +735,7 @@ public void testCacheBlockNextBlockMetadataMissing() throws Exception { assertEquals(1, blockWithoutNextBlockMetadata.getBufferReadOnly().refCnt()); } - @Test + @TestTemplate public void testRAMCache() { int size = 100; int length = HConstants.HFILEBLOCK_HEADER_SIZE + size; @@ -777,7 +774,7 @@ public void testRAMCache() { assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt()); } - @Test + @TestTemplate public void testFreeBlockWhenIOEngineWriteFailure() throws IOException { // initialize an block. int size = 100, offset = 20; @@ -803,14 +800,14 @@ public void testFreeBlockWhenIOEngineWriteFailure() throws IOException { BlockCacheKey key = new BlockCacheKey("dummy", 1L); RAMQueueEntry re = new RAMQueueEntry(key, block, 1, true, false); - Assert.assertEquals(0, allocator.getUsedSize()); + assertEquals(0, allocator.getUsedSize()); try { re.writeToCache(ioEngine, allocator, null, null, ByteBuffer.allocate(HFileBlock.BLOCK_METADATA_SPACE)); - Assert.fail(); + fail(); } catch (Exception e) { } - Assert.assertEquals(0, allocator.getUsedSize()); + assertEquals(0, allocator.getUsedSize()); } /** @@ -818,7 +815,7 @@ public void testFreeBlockWhenIOEngineWriteFailure() throws IOException { * could not be freed even if corresponding {@link HFileBlock} is evicted from * {@link BucketCache}. 
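The lifecycle renames applied throughout this patch keep their JUnit 4 semantics: @Before becomes @BeforeEach, @After becomes @AfterEach, and @AfterClass becomes @AfterAll (with @BeforeClass mapping to @BeforeAll where it appears elsewhere). The class-level pair must still be static under Jupiter's default per-method test instance lifecycle. A compact sketch of the mapping:

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    class LifecycleSketch {
      @BeforeEach
      void setUp() {
        // runs before every @Test, like JUnit 4's @Before
      }

      @AfterEach
      void tearDown() {
        // runs after every @Test, like JUnit 4's @After
      }

      @AfterAll
      static void tearDownAfterClass() {
        // runs once after the class; static is required by default
      }

      @Test
      void example() {
      }
    }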
*/ - @Test + @TestTemplate public void testFreeBucketEntryRestoredFromFile() throws Exception { BucketCache bucketCache = null; try { @@ -868,7 +865,7 @@ public void testFreeBucketEntryRestoredFromFile() throws Exception { } } - @Test + @TestTemplate public void testBlockAdditionWaitWhenCache() throws Exception { BucketCache bucketCache = null; try { @@ -931,7 +928,7 @@ public void testBlockAdditionWaitWhenCache() throws Exception { } } - @Test + @TestTemplate public void testOnConfigurationChange() throws Exception { BucketCache bucketCache = null; try { @@ -974,7 +971,7 @@ public void testOnConfigurationChange() throws Exception { } } - @Test + @TestTemplate public void testNotifyFileCachingCompletedSuccess() throws Exception { BucketCache bucketCache = null; try { @@ -998,7 +995,7 @@ public void testNotifyFileCachingCompletedSuccess() throws Exception { } } - @Test + @TestTemplate public void testNotifyFileCachingCompletedForEncodedDataSuccess() throws Exception { BucketCache bucketCache = null; try { @@ -1022,7 +1019,7 @@ public void testNotifyFileCachingCompletedForEncodedDataSuccess() throws Excepti } } - @Test + @TestTemplate public void testNotifyFileCachingCompletedNotAllCached() throws Exception { BucketCache bucketCache = null; try { @@ -1060,7 +1057,7 @@ private BucketCache testNotifyFileCachingCompletedForTenBlocks(Path filePath, return bucketCache; } - @Test + @TestTemplate public void testEvictOrphansOutOfGracePeriod() throws Exception { BucketCache bucketCache = testEvictOrphans(0); assertEquals(10, bucketCache.getBackingMap().size()); @@ -1068,7 +1065,7 @@ public void testEvictOrphansOutOfGracePeriod() throws Exception { .filter(key -> key.getHfileName().equals("testEvictOrphans-orphan")).count()); } - @Test + @TestTemplate public void testEvictOrphansWithinGracePeriod() throws Exception { BucketCache bucketCache = testEvictOrphans(60 * 60 * 1000L); assertEquals(18, bucketCache.getBackingMap().size()); @@ -1119,7 +1116,7 @@ private BucketCache testEvictOrphans(long orphanEvictionGracePeriod) throws Exce return bucketCache; } - @Test + @TestTemplate public void testBlockPriority() throws Exception { HFileBlockPair block = CacheTestUtils.generateHFileBlocks(BLOCK_SIZE, 1)[0]; cacheAndWaitUntilFlushedToBucket(cache, block.getBlockName(), block.getBlock(), true); @@ -1128,7 +1125,7 @@ public void testBlockPriority() throws Exception { assertEquals(cache.backingMap.get(block.getBlockName()).getPriority(), BlockPriority.MULTI); } - @Test + @TestTemplate public void testIOTimePerHitReturnsZeroWhenNoHits() throws NoSuchFieldException, IllegalAccessException { CacheStats cacheStats = cache.getStats(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java index b31c3706dd7e..a3c59e273054 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java @@ -19,24 +19,23 @@ import static org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; 
+import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.FileOutputStream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.io.hfile.CacheTestUtils; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.protobuf.ProtobufMagic; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for HBASE-29857: BucketCache recovery should gracefully handle empty or truncated @@ -46,13 +45,10 @@ * magic bytes without actual cache data. The fix adds null checks that throw IOException instead of * allowing NullPointerException to propagate. */ -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestBucketCacheEmptyPersistence { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBucketCacheEmptyPersistence.class); - private static final long CAPACITY_SIZE = 32 * 1024 * 1024; private static final int WRITE_THREADS = BucketCache.DEFAULT_WRITER_THREADS; private static final int WRITER_QUEUE_LEN = BucketCache.DEFAULT_WRITER_QUEUE_ITEMS; @@ -78,7 +74,7 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception { try (FileOutputStream fos = new FileOutputStream(persistenceFile)) { fos.write(ProtobufMagic.PB_MAGIC); } - assertTrue("Persistence file should exist", persistenceFile.exists()); + assertTrue(persistenceFile.exists(), "Persistence file should exist"); int[] bucketSizes = new int[] { 8 * 1024 + 1024 }; @@ -88,12 +84,12 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception { WRITE_THREADS, WRITER_QUEUE_LEN, persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, conf); // Cache should initialize successfully (not hang or throw) - assertTrue("Cache should initialize successfully after recovering from empty file", - bucketCache.waitForCacheInitialization(10000)); + assertTrue(bucketCache.waitForCacheInitialization(10000), + "Cache should initialize successfully after recovering from empty file"); // Verify the cache was reset (backing map should be empty since file had no valid data) - assertEquals("Backing map should be empty after recovering from empty persistence file", 0, - bucketCache.backingMap.size()); + assertEquals(0, bucketCache.backingMap.size(), + "Backing map should be empty after recovering from empty persistence file"); // Verify the cache is usable - we can add and retrieve blocks CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 1); @@ -104,7 +100,7 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception { // Verify block can be retrieved Cacheable retrieved = bucketCache.getBlock(blocks[0].getBlockName(), false, false, false); - assertNotNull("Should be able to retrieve cached block", retrieved); + assertNotNull(retrieved, "Should be able to retrieve cached block"); bucketCache.shutdown(); testUtil.cleanupTestDir(); @@ -131,7 +127,7 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception { try 
(FileOutputStream fos = new FileOutputStream(persistenceFile)) { fos.write(BucketProtoUtils.PB_MAGIC_V2); } - assertTrue("Persistence file should exist", persistenceFile.exists()); + assertTrue(persistenceFile.exists(), "Persistence file should exist"); int[] bucketSizes = new int[] { 8 * 1024 + 1024 }; @@ -141,12 +137,12 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception { WRITE_THREADS, WRITER_QUEUE_LEN, persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, conf); // Cache should initialize successfully (not hang or throw) - assertTrue("Cache should initialize successfully after recovering from empty file", - bucketCache.waitForCacheInitialization(10000)); + assertTrue(bucketCache.waitForCacheInitialization(10000), + "Cache should initialize successfully after recovering from empty file"); // Verify the cache was reset (backing map should be empty since file had no valid data) - assertEquals("Backing map should be empty after recovering from empty persistence file", 0, - bucketCache.backingMap.size()); + assertEquals(0, bucketCache.backingMap.size(), + "Backing map should be empty after recovering from empty persistence file"); // Verify the cache is usable - we can add and retrieve blocks CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 1); @@ -157,7 +153,7 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception { // Verify block can be retrieved Cacheable retrieved = bucketCache.getBlock(blocks[0].getBlockName(), false, false, false); - assertNotNull("Should be able to retrieve cached block", retrieved); + assertNotNull(retrieved, "Should be able to retrieve cached block"); bucketCache.shutdown(); testUtil.cleanupTestDir(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java index 35a60ec93125..1f6235b8207c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io.hfile.bucket; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.fs.HFileSystem; @@ -45,22 +44,15 @@ import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, MediumTests.class }) +@Tag(IOTests.TAG) +@Tag(MediumTests.TAG) public class TestBucketCachePersister { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestBucketCachePersister.class); - - public TestName name = new TestName(); - public int constructedBlockSize = 16 * 1024; private static final Logger LOG = LoggerFactory.getLogger(TestBucketCachePersister.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java index 4ee3f37819f7..08e7e8ea69e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.io.hfile.bucket; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; @@ -30,7 +30,6 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.io.ByteBuffAllocator; @@ -46,17 +45,13 @@ import org.apache.hadoop.hbase.nio.RefCnt; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestBucketCacheRefCnt { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBucketCacheRefCnt.class); - private static final String IO_ENGINE = "offheap"; private static final long CAPACITY_SIZE = 32 * 1024 * 1024; private static final int BLOCK_SIZE = 1024; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java index facbe7c50d11..50763eeb0402 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java @@ -19,15 +19,14 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import 
org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.Cacheable; @@ -35,20 +34,16 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestBucketWriterThread { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBucketWriterThread.class); - private BucketCache bc; private BucketCache.WriterThread wt; private BlockingQueue q; @@ -75,7 +70,7 @@ protected void startWriterThreads() { * Set up variables and get BucketCache and WriterThread into state where tests can manually * control the running of WriterThread and BucketCache is empty. */ - @Before + @BeforeEach public void setUp() throws Exception { // Arbitrary capacity. final int capacity = 16; @@ -99,7 +94,7 @@ public void setUp() throws Exception { assertTrue(q.isEmpty()); } - @After + @AfterEach public void tearDown() throws Exception { if (this.bc != null) this.bc.shutdown(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java index b42e7be804db..04a0ef65af39 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hbase.io.hfile.bucket; +import static org.junit.jupiter.api.Assertions.assertEquals; + import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer; @@ -27,21 +28,16 @@ import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Basic test for {@link ByteBufferIOEngine} */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestByteBufferIOEngine { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestByteBufferIOEngine.class); - /** * Override the {@link BucketEntry} so that we can set an arbitrary offset. 
*/ @@ -114,10 +110,10 @@ public void testByteBufferIOEngine() throws Exception { BucketEntry be = createBucketEntry(offset, blockSize); ioEngine.read(be); ByteBuff dst = getByteBuff(be); - Assert.assertEquals(src.remaining(), blockSize); - Assert.assertEquals(dst.remaining(), blockSize); - Assert.assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, - dst.position(), dst.remaining())); + assertEquals(src.remaining(), blockSize); + assertEquals(dst.remaining(), blockSize); + assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, dst.position(), + dst.remaining())); } assert testOffsetAtStartNum == 0; assert testOffsetAtEndNum == 0; @@ -189,10 +185,10 @@ public void testByteBufferIOEngineWithMBB() throws Exception { BucketEntry be = createBucketEntry(offset, blockSize); ioEngine.read(be); ByteBuff dst = getByteBuff(be); - Assert.assertEquals(src.remaining(), blockSize); - Assert.assertEquals(dst.remaining(), blockSize); - Assert.assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, - dst.position(), dst.remaining())); + assertEquals(src.remaining(), blockSize); + assertEquals(dst.remaining(), blockSize); + assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, dst.position(), + dst.remaining())); } assert testOffsetAtStartNum == 0; assert testOffsetAtEndNum == 0; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java index 8cde8c6347d3..dbd7ab047cf2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java @@ -19,28 +19,23 @@ import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.createBucketEntry; import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.getByteBuff; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Basic test for {@link ExclusiveMemoryMmapIOEngine} */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestExclusiveMemoryMmapEngine { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestExclusiveMemoryMmapEngine.class); - @Test public void testFileMmapEngine() throws IOException { int size = 2 * 1024 * 1024; // 2 MB @@ -63,10 +58,9 @@ public void testFileMmapEngine() throws IOException { fileMmapEngine.read(be); ByteBuff dst = getByteBuff(be); - Assert.assertEquals(src.remaining(), len); - Assert.assertEquals(dst.remaining(), len); - Assert.assertEquals(0, - ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining())); + assertEquals(src.remaining(), len); + assertEquals(dst.remaining(), len); + assertEquals(0, ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining())); } } finally { File file = new File(filePath); diff 
--git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java index af70c61c12c4..f19d13d8490e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java @@ -19,11 +19,11 @@ import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.createBucketEntry; import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.getByteBuff; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -31,18 +31,15 @@ import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.RefCnt; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -50,13 +47,10 @@ /** * Basic test for {@link FileIOEngine} */ -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestFileIOEngine { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFileIOEngine.class); - private static final long TOTAL_CAPACITY = 6 * 1024 * 1024; // 6 MB private static final String[] FILE_PATHS = { "testFileIOEngine1", "testFileIOEngine2", "testFileIOEngine3" }; @@ -81,12 +75,12 @@ public class TestFileIOEngine { boundaryStopPositions.add(SIZE_PER_FILE * FILE_PATHS.length - 1); } - @Before + @BeforeEach public void setUp() throws IOException { fileIOEngine = new FileIOEngine(TOTAL_CAPACITY, false, FILE_PATHS); } - @After + @AfterEach public void cleanUp() { fileIOEngine.shutdown(); for (String filePath : FILE_PATHS) { @@ -177,10 +171,9 @@ public void testClosedChannelException() throws IOException { fileIOEngine.read(be); ByteBuff dst = getByteBuff(be); - Assert.assertEquals(src.remaining(), len); - Assert.assertEquals(dst.remaining(), len); - Assert.assertEquals(0, - ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining())); + assertEquals(src.remaining(), len); + assertEquals(dst.remaining(), len); + assertEquals(0, ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining())); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java index a2909c005fd4..7d981d793553 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java @@ -17,19 +17,19 @@ */ package org.apache.hadoop.hbase.io.hfile.bucket; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; -import java.util.Arrays; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.fs.HFileSystem; @@ -41,39 +41,33 @@ import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.LargeTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@RunWith(Parameterized.class) -@Category({ IOTests.class, LargeTests.class }) +@Tag(IOTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}") public class TestPrefetchPersistence { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefetchPersistence.class); - public TestName name = new TestName(); - - @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}") @SuppressWarnings("checkstyle:Indentation") - public static Iterable<Object[]> data() { - return Arrays.asList(new Object[][] { { 16 * 1024, + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of(16 * 1024, new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024, 28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024, - 128 * 1024 + 1024 } } }); + 128 * 1024 + 1024 })); } - @Parameterized.Parameter(0) - public int constructedBlockSize; + final int constructedBlockSize; + final int[] constructedBlockSizes; - @Parameterized.Parameter(1) - public int[] constructedBlockSizes; + public TestPrefetchPersistence(int constructedBlockSize, int[] constructedBlockSizes) { + this.constructedBlockSize = constructedBlockSize; + this.constructedBlockSizes = constructedBlockSizes; + } private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchPersistence.class); @@ -95,7 +89,7 @@ public static Iterable<Object[]> data() { final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS; final int writerQLen = BucketCache.DEFAULT_WRITER_QUEUE_ITEMS; -
@Before + @BeforeEach public void setup() throws IOException { conf = TEST_UTIL.getConfiguration(); conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true); @@ -104,7 +98,7 @@ public void setup() throws IOException { fs = HFileSystem.get(conf); } - @Test + @TestTemplate public void testPrefetchPersistence() throws Exception { bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java index 58d9385f57e9..ac9f52b84a9e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java @@ -17,10 +17,14 @@ */ package org.apache.hadoop.hbase.io.hfile.bucket; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.nio.ByteBuffer; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; @@ -33,21 +37,16 @@ import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ IOTests.class, SmallTests.class }) +@Tag(IOTests.TAG) +@Tag(SmallTests.TAG) public class TestRAMCache { private static final Logger LOG = LoggerFactory.getLogger(TestRAMCache.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRAMCache.class); - // Define a mock HFileBlock. 
private static class MockHFileBlock extends HFileBlock { @@ -92,8 +91,8 @@ public void testAtomicRAMCache() throws Exception { new HFileContextBuilder().build(), ByteBuffAllocator.HEAP); RAMQueueEntry re = new RAMQueueEntry(key, blk, 1, false, false); - Assert.assertNull(cache.putIfAbsent(key, re)); - Assert.assertEquals(cache.putIfAbsent(key, re), re); + assertNull(cache.putIfAbsent(key, re)); + assertEquals(cache.putIfAbsent(key, re), re); CountDownLatch latch = new CountDownLatch(1); blk.setLatch(latch); @@ -116,11 +115,11 @@ public void testAtomicRAMCache() throws Exception { }); t2.start(); Thread.sleep(200); - Assert.assertFalse(removed.get()); + assertFalse(removed.get()); latch.countDown(); Thread.sleep(200); - Assert.assertTrue(removed.get()); - Assert.assertFalse(error.get()); + assertTrue(removed.get()); + assertFalse(error.get()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java index 5ae3343d21e6..afc2437c327e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java @@ -22,32 +22,29 @@ import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.MIN_FACTOR_CONFIG_NAME; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.CacheTestUtils; import org.apache.hadoop.hbase.io.hfile.Cacheable; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Basic test for check file's integrity before start BucketCache in fileIOEngine */ -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestRecoveryPersistentBucketCache { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRecoveryPersistentBucketCache.class); final long capacitySize = 32 * 1024 * 1024; final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java index 9ba9eba5c61b..fde6d46f34ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java @@ -20,10 +20,10 @@ import static org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BACKING_MAP_PERSISTENCE_CHUNK_SIZE; import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.BufferedWriter; import java.io.File; @@ -33,54 +33,47 @@ import java.nio.file.Files; import java.nio.file.attribute.FileTime; import java.time.Instant; -import java.util.Arrays; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheTestUtils; import org.apache.hadoop.hbase.io.hfile.Cacheable; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; /** * Basic test for check file's integrity before start BucketCache in fileIOEngine */ -@RunWith(Parameterized.class) -@Category(SmallTests.class) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}") public class TestVerifyBucketCacheFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestVerifyBucketCacheFile.class); - - @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}") - public static Iterable<Object[]> data() { - return Arrays.asList(new Object[][] { { 8192, null }, - { 16 * 1024, + + public static Stream<Arguments> parameters() { + return Stream.of(Arguments.of(8192, null), + Arguments.of(16 * 1024, new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024, 28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024, - 128 * 1024 + 1024 } } }); + 128 * 1024 + 1024 })); } - @Rule - public TestName name = new TestName(); - - @Parameterized.Parameter(0) - public int constructedBlockSize; + private final int constructedBlockSize; + private final int[] constructedBlockSizes; - @Parameterized.Parameter(1) - public int[] constructedBlockSizes; + public TestVerifyBucketCacheFile(int constructedBlockSize, int[] constructedBlockSizes) { + this.constructedBlockSize = 
constructedBlockSize; + this.constructedBlockSizes = constructedBlockSizes; + } final long capacitySize = 32 * 1024 * 1024; final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS; @@ -96,7 +89,7 @@ public static Iterable data() { * cache file and persistence file would be deleted before BucketCache start normally. * @throws Exception the exception */ - @Test + @TestTemplate public void testRetrieveFromFile() throws Exception { HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); Path testDir = TEST_UTIL.getDataTestDir(); @@ -108,9 +101,9 @@ public void testRetrieveFromFile() throws Exception { BucketCache bucketCache = null; BucketCache recoveredBucketCache = null; try { - bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, - constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName()); + bucketCache = + new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, + constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence"); assertTrue(bucketCache.waitForCacheInitialization(10000)); long usedSize = bucketCache.getAllocator().getUsedSize(); assertEquals(0, usedSize); @@ -126,9 +119,9 @@ public void testRetrieveFromFile() throws Exception { // 1.persist cache to file bucketCache.shutdown(); // restore cache from file - bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, - constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName()); + bucketCache = + new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, + constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence"); assertTrue(bucketCache.waitForCacheInitialization(10000)); assertEquals(usedSize, bucketCache.getAllocator().getUsedSize()); // persist cache to file @@ -139,9 +132,9 @@ public void testRetrieveFromFile() throws Exception { FileSystems.getDefault().getPath(testDir.toString(), "bucket.cache"); assertTrue(Files.deleteIfExists(cacheFile)); // can't restore cache from file - recoveredBucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, - constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName()); + recoveredBucketCache = + new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, + constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence"); assertTrue(recoveredBucketCache.waitForCacheInitialization(10000)); waitPersistentCacheValidation(conf, recoveredBucketCache); assertEquals(0, recoveredBucketCache.getAllocator().getUsedSize()); @@ -157,14 +150,13 @@ public void testRetrieveFromFile() throws Exception { recoveredBucketCache.shutdown(); // 3.delete backingMap persistence file - final java.nio.file.Path mapFile = FileSystems.getDefault().getPath(testDir.toString(), - "bucket.persistence" + name.getMethodName()); + final java.nio.file.Path mapFile = + FileSystems.getDefault().getPath(testDir.toString(), "bucket.persistence"); assertTrue(Files.deleteIfExists(mapFile)); // can't restore cache from file bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION, - conf); + testDir + 
"/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf); assertTrue(bucketCache.waitForCacheInitialization(10000)); waitPersistentCacheValidation(conf, bucketCache); assertEquals(0, bucketCache.getAllocator().getUsedSize()); @@ -180,15 +172,14 @@ public void testRetrieveFromFile() throws Exception { TEST_UTIL.cleanupTestDir(); } - @Test + @TestTemplate public void testRetrieveFromFileAfterDelete() throws Exception { HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); Path testDir = TEST_UTIL.getDataTestDir(); TEST_UTIL.getTestFileSystem().mkdirs(testDir); Configuration conf = TEST_UTIL.getConfiguration(); conf.setLong(CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY, 300); - String mapFileName = - testDir + "/bucket.persistence" + name.getMethodName() + EnvironmentEdgeManager.currentTime(); + String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime(); BucketCache bucketCache = null; try { bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, @@ -233,7 +224,7 @@ public void testRetrieveFromFileAfterDelete() throws Exception { * persistence file would be deleted before BucketCache start normally. * @throws Exception the exception */ - @Test + @TestTemplate public void testModifiedBucketCacheFileData() throws Exception { HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); Path testDir = TEST_UTIL.getDataTestDir(); @@ -246,8 +237,7 @@ public void testModifiedBucketCacheFileData() throws Exception { try { bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION, - conf); + testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf); assertTrue(bucketCache.waitForCacheInitialization(10000)); long usedSize = bucketCache.getAllocator().getUsedSize(); assertEquals(0, usedSize); @@ -272,8 +262,7 @@ public void testModifiedBucketCacheFileData() throws Exception { // can't restore cache from file bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION, - conf); + testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf); assertTrue(bucketCache.waitForCacheInitialization(10000)); waitPersistentCacheValidation(conf, bucketCache); assertEquals(0, bucketCache.getAllocator().getUsedSize()); @@ -300,7 +289,7 @@ public void testModifiedBucketCacheFileData() throws Exception { * recoverable from the cache. 
* @throws Exception the exception */ - @Test + @TestTemplate public void testModifiedBucketCacheFileTime() throws Exception { HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); Path testDir = TEST_UTIL.getDataTestDir(); @@ -312,8 +301,7 @@ public void testModifiedBucketCacheFileTime() throws Exception { try { bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION, - conf); + testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf); assertTrue(bucketCache.waitForCacheInitialization(10000)); long usedSize = bucketCache.getAllocator().getUsedSize(); assertEquals(0, usedSize); @@ -338,8 +326,7 @@ public void testModifiedBucketCacheFileTime() throws Exception { // can't restore cache from file bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen, - testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION, - conf); + testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf); assertTrue(bucketCache.waitForCacheInitialization(10000)); waitPersistentCacheValidation(conf, bucketCache); assertEquals(usedSize, bucketCache.getAllocator().getUsedSize()); @@ -360,7 +347,7 @@ public void testModifiedBucketCacheFileTime() throws Exception { * corruption. * @throws Exception the exception */ - @Test + @TestTemplate public void testBucketCacheRecovery() throws Exception { HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); Path testDir = TEST_UTIL.getDataTestDir(); @@ -368,8 +355,7 @@ public void testBucketCacheRecovery() throws Exception { Configuration conf = HBaseConfiguration.create(); // Disables the persister thread by setting its interval to MAX_VALUE conf.setLong(BUCKETCACHE_PERSIST_INTERVAL_KEY, Long.MAX_VALUE); - String mapFileName = - testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime() + name.getMethodName(); + String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime(); BucketCache bucketCache = null; BucketCache newBucketCache = null; try { @@ -420,18 +406,18 @@ public void testBucketCacheRecovery() throws Exception { } } - @Test + @TestTemplate public void testSingleChunk() throws Exception { testChunkedBackingMapRecovery(5, 5); } - @Test + @TestTemplate public void testCompletelyFilledChunks() throws Exception { // Test where the all the chunks are complete with chunkSize entries testChunkedBackingMapRecovery(5, 10); } - @Test + @TestTemplate public void testPartiallyFilledChunks() throws Exception { // Test where the last chunk is not completely filled. testChunkedBackingMapRecovery(5, 13); @@ -444,8 +430,7 @@ private void testChunkedBackingMapRecovery(int chunkSize, int numBlocks) throws Configuration conf = HBaseConfiguration.create(); conf.setLong(BACKING_MAP_PERSISTENCE_CHUNK_SIZE, chunkSize); - String mapFileName = - testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime() + name.getMethodName(); + String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime(); BucketCache bucketCache = null; BucketCache newBucketCache = null; try {
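The hunks above repeat a small set of JUnit 4 to Jupiter conversions; the notes below sketch them, reusing names from the patch where possible. The most mechanical conversion is the assertion signature change: JUnit 4 takes the failure message as the first argument, Jupiter takes it as the last, which is why lines such as the factor-propagation checks in testValidBucketCacheConfigs are reshuffled rather than merely re-imported. Jupiter additionally accepts a Supplier<String> in the message position, so the message string is only built on failure; a minimal sketch, assuming the same fields as TestBucketCache:

    // Message moves to the last position and may be supplied lazily:
    assertEquals(0.9f, cache.getAcceptableFactor(), 0,
      () -> ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.");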
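testCacheSizeCapacity and testFreeBlockWhenIOEngineWriteFailure keep the JUnit 4-era try/fail()/catch shape even though this patch already uses assertThrows elsewhere. A possible further tightening, not something this patch does, using the names from testCacheSizeCapacity:

    // assertThrows returns the thrown exception, so the message check stays:
    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
      () -> new BucketCache(ioEngineName, Long.MAX_VALUE, 1, constructedBlockSizes,
        writeThreads, writerQLen, null, 100, conf));
    assertEquals("Cache capacity is too large, only support 32TB now", e.getMessage());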
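The annotation renames are applied uniformly across the files touched here. The mapping, as far as this diff shows it:

    // JUnit 4                        -> JUnit 5 (Jupiter)
    // @Before / @After               -> @BeforeEach / @AfterEach
    // @Test (org.junit)              -> @Test (org.junit.jupiter.api), or
    //                                   @TestTemplate in parameterized classes
    // @Category(SmallTests.class)    -> @Tag(SmallTests.TAG)
    // @RunWith(Parameterized.class)  -> @HBaseParameterizedTestTemplate
    // @ClassRule HBaseClassTestRule  -> removed (its timeout enforcement is
    //                                   presumably handled by other JUnit 5
    //                                   machinery not visible in this diff)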
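@Tag only accepts strings, so the migration presupposes that the testclassification interfaces now carry a TAG constant alongside the old marker class; that change is not visible in this diff. A guess at its shape, plus how such tags are typically selected at build time:

    // Hypothetical shape of the tag constant (not shown in this patch):
    public interface SmallTests {
      String TAG = "SmallTests";
    }

    // Maven Surefire maps its group filters onto JUnit 5 tags, e.g.:
    //   mvn test -Dgroups=SmallTests -DexcludedGroups=LargeTests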
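For parameterized classes, the patch replaces the Parameterized runner with @TestTemplate methods, a static parameters() returning Stream<Arguments>, constructor injection of the parameter values, and a class-level @HBaseParameterizedTestTemplate annotation. In plain Jupiter terms, that annotation must ultimately register a TestTemplateInvocationContextProvider for @TestTemplate to run at all; a minimal sketch of that contract, with everything below hypothetical except the JUnit types:

    import java.util.stream.Stream;
    import org.junit.jupiter.api.extension.ExtensionContext;
    import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
    import org.junit.jupiter.api.extension.TestTemplateInvocationContextProvider;

    class ExampleInvocationContextProvider implements TestTemplateInvocationContextProvider {
      @Override
      public boolean supportsTestTemplate(ExtensionContext context) {
        return true; // apply to every @TestTemplate method in the class
      }

      @Override
      public Stream<TestTemplateInvocationContext> provideTestTemplateInvocationContexts(
        ExtensionContext context) {
        // a real provider would emit one context per parameters() entry,
        // with the display name built from the annotation's name pattern
        return Stream.of(new TestTemplateInvocationContext() {
          @Override
          public String getDisplayName(int invocationIndex) {
            return invocationIndex + ": blockSize=8192";
          }
        });
      }
    }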
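Dropping the TestName rule also drops the name.getMethodName() suffix from the persistence paths in TestVerifyBucketCacheFile, so every test in that class now reuses the same bucket.persistence name; this appears safe only because each test builds its own HBaseTestingUtility data dir and cleans it up. If a per-method name were still wanted, Jupiter's TestInfo injection is the closest equivalent; a sketch, not part of the patch:

    import java.lang.reflect.Method;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.TestInfo;

    class NameCapture {
      String methodName;

      @BeforeEach
      void captureName(TestInfo testInfo) {
        // mirrors JUnit 4's TestName#getMethodName()
        methodName = testInfo.getTestMethod().map(Method::getName).orElse("unknown");
      }
    }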
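The three chunk tests pin down the boundary cases of BACKING_MAP_PERSISTENCE_CHUNK_SIZE recovery: exactly one chunk, an exact multiple of the chunk size, and a trailing partial chunk. The arithmetic they cover:

    // chunkSize = 5 throughout testChunkedBackingMapRecovery:
    int chunkSize = 5;
    for (int numBlocks : new int[] { 5, 10, 13 }) {
      int fullChunks = numBlocks / chunkSize; // 1, 2, 2
      int leftover = numBlocks % chunkSize;   // 0, 0, 3
      System.out.println(numBlocks + " blocks -> " + fullChunks
        + " full chunk(s) + " + leftover + " leftover entries");
    }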