diff --git a/.gitignore b/.gitignore
index 9ab7a972d7ac..7d2c2b0e8570 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,3 +26,4 @@ tmp
**/.flattened-pom.xml
.vscode/
**/__pycache__
+.opencode
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
index 1df5b22a7029..ccdf9958ac12 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
@@ -53,6 +53,10 @@ public boolean supportsParameter(ParameterContext pc, ExtensionContext ec)
// test with wrapper type, otherwise it will always return false
return Primitives.wrap(expectedType).isAssignableFrom(value.getClass());
}
+ // non-primitive type can accept null value
+ if (value == null) {
+ return true;
+ }
return expectedType.isAssignableFrom(value.getClass());
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
index 809ca50bd5a2..894ee921596e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
@@ -17,24 +17,20 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(MiscTests.TAG)
public class TestByteBufferOutputStream {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestByteBufferOutputStream.class);
-
@Test
public void testByteBufferReuse() throws IOException {
byte[] someBytes = Bytes.toBytes("some bytes");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java
index 77aa00ef91f9..e14298114212 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFSDataInputStreamWrapper.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.io.InputStream;
@@ -33,20 +33,16 @@
import org.apache.hadoop.fs.HasEnhancedByteBufferAccess;
import org.apache.hadoop.fs.ReadOption;
import org.apache.hadoop.fs.StreamCapabilities;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.ByteBufferPool;
-import org.junit.ClassRule;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(MiscTests.TAG)
public class TestFSDataInputStreamWrapper {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestFSDataInputStreamWrapper.class);
-
@Test
public void testUnbuffer() throws Exception {
InputStream pc = new ParentClass();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
index 61ebde208225..eb5f589be290 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
@@ -17,10 +17,11 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -31,7 +32,6 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -40,21 +40,17 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
import org.apache.hadoop.ipc.RemoteException;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Test that FileLink switches between alternate locations when the current location moves or gets
* deleted.
*/
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestFileLink {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestFileLink.class);
-
@Test
public void testEquals() {
Path p1 = new Path("/p1");
@@ -160,7 +156,7 @@ public Configuration getConf() {
}
}
- @Test(expected = FileNotFoundException.class)
+ @Test
public void testLinkReadWithMissingFile() throws Exception {
HBaseTestingUtility testUtil = new HBaseTestingUtility();
FileSystem fs = new MyDistributedFileSystem();
@@ -173,7 +169,9 @@ public void testLinkReadWithMissingFile() throws Exception {
files.add(archivedPath);
FileLink link = new FileLink(files);
- link.open(fs);
+ assertThrows(FileNotFoundException.class, () -> {
+ link.open(fs);
+ });
}
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
index ce96f248564c..a9591f22b338 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
@@ -17,79 +17,69 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.regex.Matcher;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
/**
* Test that FileLink switches between alternate locations when the current location moves or gets
* deleted.
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestHFileLink {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileLink.class);
-
- @Rule
- public TestName name = new TestName();
-
@Test
- public void testValidLinkNames() {
+ public void testValidLinkNames(TestInfo testInfo) {
String validLinkNames[] = { "foo=fefefe-0123456", "ns=foo=abababa-fefefefe" };
for (String name : validLinkNames) {
- Assert.assertTrue("Failed validating:" + name, name.matches(HFileLink.LINK_NAME_REGEX));
+ assertTrue(name.matches(HFileLink.LINK_NAME_REGEX), "Failed validating:" + name);
}
for (String name : validLinkNames) {
- Assert.assertTrue("Failed validating:" + name, HFileLink.isHFileLink(name));
+ assertTrue(HFileLink.isHFileLink(name), "Failed validating:" + name);
}
- String testName = name.getMethodName() + "=fefefe-0123456";
- Assert.assertEquals(TableName.valueOf(name.getMethodName()),
+ String testName = testInfo.getTestMethod().get().getName() + "=fefefe-0123456";
+ assertEquals(TableName.valueOf(testInfo.getTestMethod().get().getName()),
HFileLink.getReferencedTableName(testName));
- Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
- Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
- Assert.assertEquals(testName,
- HFileLink.createHFileLinkName(TableName.valueOf(name.getMethodName()), "fefefe", "0123456"));
+ assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
+ assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
+ assertEquals(testName, HFileLink.createHFileLinkName(
+ TableName.valueOf(testInfo.getTestMethod().get().getName()), "fefefe", "0123456"));
- testName = "ns=" + name.getMethodName() + "=fefefe-0123456";
- Assert.assertEquals(TableName.valueOf("ns", name.getMethodName()),
+ testName = "ns=" + testInfo.getTestMethod().get().getName() + "=fefefe-0123456";
+ assertEquals(TableName.valueOf("ns", testInfo.getTestMethod().get().getName()),
HFileLink.getReferencedTableName(testName));
- Assert.assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
- Assert.assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
- Assert.assertEquals(testName, HFileLink
- .createHFileLinkName(TableName.valueOf("ns", name.getMethodName()), "fefefe", "0123456"));
+ assertEquals("fefefe", HFileLink.getReferencedRegionName(testName));
+ assertEquals("0123456", HFileLink.getReferencedHFileName(testName));
+ assertEquals(testName, HFileLink.createHFileLinkName(
+ TableName.valueOf("ns", testInfo.getTestMethod().get().getName()), "fefefe", "0123456"));
for (String name : validLinkNames) {
Matcher m = HFileLink.LINK_NAME_PATTERN.matcher(name);
assertTrue(m.matches());
- Assert.assertEquals(HFileLink.getReferencedTableName(name),
+ assertEquals(HFileLink.getReferencedTableName(name),
TableName.valueOf(m.group(1), m.group(2)));
- Assert.assertEquals(HFileLink.getReferencedRegionName(name), m.group(3));
- Assert.assertEquals(HFileLink.getReferencedHFileName(name), m.group(4));
+ assertEquals(HFileLink.getReferencedRegionName(name), m.group(3));
+ assertEquals(HFileLink.getReferencedHFileName(name), m.group(4));
}
}
@Test
- public void testBackReference() {
+ public void testBackReference(TestInfo testInfo) {
Path rootDir = new Path("/root");
Path archiveDir = new Path(rootDir, ".archive");
String storeFileName = "121212";
@@ -97,8 +87,8 @@ public void testBackReference() {
String encodedRegion = "FEFE";
String cf = "cf1";
- TableName refTables[] =
- { TableName.valueOf(name.getMethodName()), TableName.valueOf("ns", name.getMethodName()) };
+ TableName refTables[] = { TableName.valueOf(testInfo.getTestMethod().get().getName()),
+ TableName.valueOf("ns", testInfo.getTestMethod().get().getName()) };
for (TableName refTable : refTables) {
Path refTableDir = CommonFSUtils.getTableDir(archiveDir, refTable);
@@ -108,28 +98,26 @@ public void testBackReference() {
String refStoreFileName = refTable.getNameAsString().replace(TableName.NAMESPACE_DELIM, '=')
+ "=" + encodedRegion + "-" + storeFileName;
- TableName tableNames[] = { TableName.valueOf(name.getMethodName() + "1"),
- TableName.valueOf("ns", name.getMethodName() + "2"),
- TableName.valueOf(name.getMethodName() + ":" + name.getMethodName()) };
+ TableName tableNames[] = { TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"),
+ TableName.valueOf("ns", testInfo.getTestMethod().get().getName() + "2"),
+ TableName.valueOf(testInfo.getTestMethod().get().getName() + ":"
+ + testInfo.getTestMethod().get().getName()) };
for (TableName tableName : tableNames) {
Path tableDir = CommonFSUtils.getTableDir(rootDir, tableName);
Path regionDir = HRegion.getRegionDir(tableDir, encodedRegion);
Path cfDir = new Path(regionDir, cf);
- // Verify back reference creation
assertEquals(
encodedRegion + "." + tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='),
HFileLink.createBackReferenceName(CommonFSUtils.getTableName(tableDir).getNameAsString(),
encodedRegion));
- // verify parsing back reference
Pair parsedRef = HFileLink.parseBackReferenceName(encodedRegion + "."
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
assertEquals(parsedRef.getFirst(), tableName);
assertEquals(encodedRegion, parsedRef.getSecond());
- // verify resolving back reference
Path storeFileDir = new Path(refLinkDir, encodedRegion + "."
+ tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
Path linkPath = new Path(cfDir, refStoreFileName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index a999a4ac879c..2dd3e9e750b3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.file.Paths;
@@ -32,7 +32,6 @@
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -56,27 +55,27 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({ IOTests.class, SmallTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestHalfStoreFileReader {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHalfStoreFileReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHalfStoreFileReader.class);
private static HBaseTestingUtility TEST_UTIL;
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.cleanupTestDir();
}
@@ -100,7 +99,7 @@ public void testHalfScanAndReseek() throws IOException, InterruptedException {
fs.mkdirs(parentPath);
String tableName = Paths.get(root_dir).getFileName().toString();
RegionInfo splitAHri = RegionInfoBuilder.newBuilder(TableName.valueOf(tableName)).build();
- Thread.currentThread().sleep(1000);
+ Thread.sleep(1000);
RegionInfo splitBHri = RegionInfoBuilder.newBuilder(TableName.valueOf(tableName)).build();
Path splitAPath = new Path(new Path(root_dir, splitAHri.getRegionNameAsString()), "CF");
Path splitBPath = new Path(new Path(root_dir, splitBHri.getRegionNameAsString()), "CF");
@@ -165,21 +164,19 @@ private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, Cach
(HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf);
storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader());
halfreader.loadFileInfo();
- final HFileScanner scanner = halfreader.getScanner(false, false);
-
- scanner.seekTo();
- Cell curr;
- do {
- curr = scanner.getCell();
- KeyValue reseekKv = getLastOnCol(curr);
- int ret = scanner.reseekTo(reseekKv);
- assertTrue("reseek to returned: " + ret, ret > 0);
- // System.out.println(curr + ": " + ret);
- } while (scanner.next());
-
- int ret = scanner.reseekTo(getLastOnCol(curr));
- // System.out.println("Last reseek: " + ret);
- assertTrue(ret > 0);
+ try (HFileScanner scanner = halfreader.getScanner(false, false)) {
+ scanner.seekTo();
+ Cell curr;
+ do {
+ curr = scanner.getCell();
+ KeyValue reseekKv = getLastOnCol(curr);
+ int ret = scanner.reseekTo(reseekKv);
+ assertTrue(ret > 0, "reseek to returned: " + ret);
+ } while (scanner.next());
+
+ int ret = scanner.reseekTo(getLastOnCol(curr));
+ assertTrue(ret > 0);
+ }
halfreader.close(true);
}
@@ -218,8 +215,8 @@ public void testHalfScanner() throws IOException {
}
beforeMidKey = item;
}
- System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
- System.out.println("beforeMidKey: " + beforeMidKey);
+ LOG.info("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
+ LOG.info("beforeMidKey: " + beforeMidKey);
// Seek on the splitKey, should be in top, not in bottom
Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 38b093b997e3..9886e009f6ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -19,8 +19,8 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.lessThan;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.lang.management.ManagementFactory;
@@ -42,7 +42,6 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
@@ -74,29 +73,25 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Testing the sizing that HeapSize offers and compares to the size given by ClassSize.
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestHeapSize {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHeapSize.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestHeapSize.class);
// List of classes implementing HeapSize
// BatchOperation, BatchUpdate, BlockIndex, Entry, Entry, HStoreKey
// KeyValue, LruBlockCache, Put, WALKey
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
// Print detail on jvm so we know what is different should below test fail.
RuntimeMXBean b = ManagementFactory.getRuntimeMXBean();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java
index 51b9b3b7618d..2a82c77a3335 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java
@@ -17,28 +17,23 @@
*/
package org.apache.hadoop.hbase.io;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestImmutableBytesWritable {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImmutableBytesWritable.class);
-
@Test
public void testHash() throws Exception {
assertEquals(new ImmutableBytesWritable(Bytes.toBytes("xxabc"), 2, 3).hashCode(),
@@ -54,7 +49,7 @@ public void testSpecificCompare() {
ImmutableBytesWritable ibw1 = new ImmutableBytesWritable(new byte[] { 0x0f });
ImmutableBytesWritable ibw2 = new ImmutableBytesWritable(new byte[] { 0x00, 0x00 });
ImmutableBytesWritable.Comparator c = new ImmutableBytesWritable.Comparator();
- assertFalse("ibw1 < ibw2", c.compare(ibw1, ibw2) < 0);
+ assertFalse(c.compare(ibw1, ibw2) < 0, "ibw1 < ibw2");
}
@Test
@@ -103,23 +98,22 @@ private void doComparisonsOnRaw(ImmutableBytesWritable a, ImmutableBytesWritable
a.write(new DataOutputStream(baosA));
b.write(new DataOutputStream(baosB));
- assertEquals("Comparing " + a + " and " + b + " as raw", signum(comparator
- .compare(baosA.toByteArray(), 0, baosA.size(), baosB.toByteArray(), 0, baosB.size())),
- expectedSignum);
+ assertEquals(signum(comparator.compare(baosA.toByteArray(), 0, baosA.size(),
+ baosB.toByteArray(), 0, baosB.size())), expectedSignum,
+ "Comparing " + a + " and " + b + " as raw");
- assertEquals(
- "Comparing " + a + " and " + b + " as raw (inverse)", -signum(comparator
- .compare(baosB.toByteArray(), 0, baosB.size(), baosA.toByteArray(), 0, baosA.size())),
- expectedSignum);
+ assertEquals(-signum(comparator.compare(baosB.toByteArray(), 0, baosB.size(),
+ baosA.toByteArray(), 0, baosA.size())), expectedSignum,
+ "Comparing " + a + " and " + b + " as raw (inverse)");
}
private void doComparisonsOnObjects(ImmutableBytesWritable a, ImmutableBytesWritable b,
int expectedSignum) {
ImmutableBytesWritable.Comparator comparator = new ImmutableBytesWritable.Comparator();
- assertEquals("Comparing " + a + " and " + b + " as objects", signum(comparator.compare(a, b)),
- expectedSignum);
- assertEquals("Comparing " + a + " and " + b + " as objects (inverse)",
- -signum(comparator.compare(b, a)), expectedSignum);
+ assertEquals(signum(comparator.compare(a, b)), expectedSignum,
+ "Comparing " + a + " and " + b + " as objects");
+ assertEquals(-signum(comparator.compare(b, a)), expectedSignum,
+ "Comparing " + a + " and " + b + " as objects (inverse)");
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java
index 7e9217d09c0a..7f3ae8ff2b41 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestMetricsIO.java
@@ -18,20 +18,16 @@
package org.apache.hadoop.hbase.io;
import org.apache.hadoop.hbase.CompatibilityFactory;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(MiscTests.TAG)
public class TestMetricsIO {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestMetricsIO.class);
-
public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
@Test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java
index 76a411d390bc..39a63def8f46 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.io.compress;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import java.util.Random;
@@ -86,11 +86,11 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress
HFile.Reader reader = HFile.createReader(FS, path, cacheConf, true, conf);
try {
scanner = reader.getScanner(conf, false, false);
- assertTrue("Initial seekTo failed", scanner.seekTo());
+ assertTrue(scanner.seekTo(), "Initial seekTo failed");
do {
Cell kv = scanner.getCell();
- assertTrue("Read back an unexpected or invalid KV",
- testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
+ assertTrue(testKvs.contains(KeyValueUtil.ensureKeyValue(kv)),
+ "Read back an unexpected or invalid KV");
i++;
} while (scanner.next());
} finally {
@@ -98,7 +98,7 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress
scanner.close();
}
- assertEquals("Did not read back as many KVs as written", i, testKvs.size());
+ assertEquals(i, testKvs.size(), "Did not read back as many KVs as written");
// Test random seeks with pread
Random rand = ThreadLocalRandom.current();
@@ -106,10 +106,10 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress
reader = HFile.createReader(FS, path, cacheConf, true, conf);
try {
scanner = reader.getScanner(conf, false, true);
- assertTrue("Initial seekTo failed", scanner.seekTo());
+ assertTrue(scanner.seekTo(), "Initial seekTo failed");
for (i = 0; i < 100; i++) {
KeyValue kv = testKvs.get(rand.nextInt(testKvs.size()));
- assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
+ assertEquals(0, scanner.seekTo(kv), "Unable to find KV as expected: " + kv);
}
} finally {
scanner.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
index 68ce5e359f56..698a0664cf80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
@@ -17,15 +17,14 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
@@ -37,17 +36,13 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ObjectIntPair;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestBufferedDataBlockEncoder {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBufferedDataBlockEncoder.class);
-
byte[] row1 = Bytes.toBytes("row1");
byte[] row2 = Bytes.toBytes("row2");
byte[] row_1_0 = Bytes.toBytes("row10");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index ab837f4d5cae..6e28d478b287 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
@@ -29,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -49,24 +48,20 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests changing data block encoding settings of a column family.
*/
-@Category({ IOTests.class, LargeTests.class })
+@Tag(IOTests.TAG)
+@Tag(LargeTests.TAG)
public class TestChangingEncoding {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestChangingEncoding.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestChangingEncoding.class);
static final String CF = "EncodingTestCF";
static final byte[] CF_BYTES = Bytes.toBytes(CF);
@@ -104,7 +99,7 @@ private void prepareTest(String testId) throws IOException {
numBatchesWritten = 0;
}
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
// Use a small flush size to create more HFiles.
conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
@@ -117,7 +112,7 @@ public static void setUpBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index a502113325fa..dc0dc5be7721 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
@@ -27,18 +27,18 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -55,13 +55,8 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.RedundantKVGenerator;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -69,14 +64,12 @@
* Test all of the data block encoding algorithms for correctness. Most of the class generate data
* which will test different branches in code.
*/
-@Category({ IOTests.class, LargeTests.class })
-@RunWith(Parameterized.class)
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(
+ name = "{index}: includesMemstoreTS={0}, includesTags={1}, useOffheapData={2}")
public class TestDataBlockEncoders {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestDataBlockEncoders.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestDataBlockEncoders.class);
private static int NUMBER_OF_KV = 10000;
@@ -92,9 +85,8 @@ public class TestDataBlockEncoders {
private final boolean includesTags;
private final boolean useOffheapData;
- @Parameters
- public static Collection
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestHFileSeek {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileSeek.class);
-
- private static final byte[] CF = "f1".getBytes();
- private static final byte[] QUAL = "q1".getBytes();
+ private static final byte[] CF = Bytes.toBytes("f1");
+ private static final byte[] QUAL = Bytes.toBytes("q1");
private static final boolean USE_PREAD = true;
private MyOptions options;
private Configuration conf;
@@ -80,7 +76,7 @@ public class TestHFileSeek {
private static final Logger LOG = LoggerFactory.getLogger(TestHFileSeek.class);
- @Before
+ @BeforeEach
public void setUp() throws IOException {
if (options == null) {
options = new MyOptions(new String[0]);
@@ -107,7 +103,7 @@ public void setUp() throws IOException {
kvGen = new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen, options.dictSize);
}
- @After
+ @AfterEach
public void tearDown() {
try {
fs.close();
@@ -162,10 +158,10 @@ private void createTFile() throws IOException {
double duration = (double) timer.read() / 1000; // in us.
long fsize = fs.getFileStatus(path).getLen();
- System.out.printf("time: %s...uncompressed: %.2fMB...raw thrpt: %.2fMB/s\n", timer.toString(),
- (double) totalBytes / 1024 / 1024, totalBytes / duration);
- System.out.printf("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n", timer.toString(),
- (double) fsize / 1024 / 1024, fsize / duration);
+ LOG.info(String.format("time: %s...uncompressed: %.2fMB...raw thrpt: %.2fMB/s\n",
+ timer.toString(), (double) totalBytes / 1024 / 1024, totalBytes / duration));
+ LOG.info(String.format("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n",
+ timer.toString(), (double) fsize / 1024 / 1024, fsize / duration));
}
public void seekTFile() throws IOException {
@@ -194,9 +190,9 @@ public void seekTFile() throws IOException {
}
}
timer.stop();
- System.out.printf("time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
+ LOG.info(String.format("time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
timer.toString(), NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
- options.seekCount - miss, miss, (double) totalBytes / 1024 / (options.seekCount - miss));
+ options.seekCount - miss, miss, (double) totalBytes / 1024 / (options.seekCount - miss)));
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index 40220f07bb72..08b073e6798a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -17,17 +17,17 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Random;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -35,8 +35,8 @@
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -53,27 +53,20 @@
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Testing writing a version 3 {@link HFile}.
*/
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, SmallTests.class })
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(SmallTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: useTags={0}")
public class TestHFileWriterV3 {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileWriterV3.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestHFileWriterV3.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final Random RNG = new Random(9713312); // Just a fixed seed.
@@ -86,18 +79,17 @@ public TestHFileWriterV3(boolean useTags) {
this.useTags = useTags;
}
- @Parameters
- public static Collection<Object[]> parameters() {
- return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
+ public static Stream<Arguments> parameters() {
+ return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED.stream().map(arr -> Arguments.of(arr));
}
- @Before
+ @BeforeEach
public void setUp() throws IOException {
conf = TEST_UTIL.getConfiguration();
fs = FileSystem.get(conf);
}
- @Test
+ @TestTemplate
public void testHFileFormatV3() throws IOException {
testHFileFormatV3Internals(useTags);
}
@@ -109,7 +101,7 @@ private void testHFileFormatV3Internals(boolean useTags) throws IOException {
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags);
}
- @Test
+ @TestTemplate
public void testMidKeyInHFile() throws IOException {
testMidKeyInHFileInternals(useTags);
}
@@ -197,7 +189,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Algorithm compressAlgo, i
hfile.initMetaAndIndex(reader);
if (findMidKey) {
Cell midkey = dataBlockIndexReader.midkey(reader);
- assertNotNull("Midkey should not be null", midkey);
+ assertNotNull(midkey, "Midkey should not be null");
}
// Meta index.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java
index 09c259479258..92e2814dca19 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3WithDataEncoders.java
@@ -17,12 +17,14 @@
*/
package org.apache.hadoop.hbase.io.hfile;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
import java.util.List;
import java.util.Random;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -31,7 +33,7 @@
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -48,27 +50,20 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Testing writing a version 3 {@link HFile} for all encoded blocks
*/
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, MediumTests.class })
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: useTags={0}, dataBlockEncoding={1}")
public class TestHFileWriterV3WithDataEncoders {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestHFileWriterV3WithDataEncoders.class);
-
private static final Logger LOG =
LoggerFactory.getLogger(TestHFileWriterV3WithDataEncoders.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -84,28 +79,26 @@ public TestHFileWriterV3WithDataEncoders(boolean useTags, DataBlockEncoding data
this.dataBlockEncoding = dataBlockEncoding;
}
- @Parameterized.Parameters
- public static Collection<Object[]> parameters() {
+ public static Stream<Arguments> parameters() {
DataBlockEncoding[] dataBlockEncodings = DataBlockEncoding.values();
- Object[][] params = new Object[dataBlockEncodings.length * 2 - 2][];
- int i = 0;
+ Stream.Builder<Arguments> builder = Stream.builder();
for (DataBlockEncoding dataBlockEncoding : dataBlockEncodings) {
if (dataBlockEncoding == DataBlockEncoding.NONE) {
continue;
}
- params[i++] = new Object[] { false, dataBlockEncoding };
- params[i++] = new Object[] { true, dataBlockEncoding };
+ builder.add(Arguments.of(false, dataBlockEncoding));
+ builder.add(Arguments.of(true, dataBlockEncoding));
}
- return Arrays.asList(params);
+ return builder.build();
}
- @Before
+ @BeforeEach
public void setUp() throws IOException {
conf = TEST_UTIL.getConfiguration();
fs = FileSystem.get(conf);
}
- @Test
+ @TestTemplate
public void testHFileFormatV3() throws IOException {
testHFileFormatV3Internals(useTags);
}
@@ -117,7 +110,7 @@ private void testHFileFormatV3Internals(boolean useTags) throws IOException {
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags);
}
- @Test
+ @TestTemplate
public void testMidKeyInHFile() throws IOException {
testMidKeyInHFileInternals(useTags);
}
@@ -147,8 +140,8 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com
long fileSize = fs.getFileStatus(hfilePath).getLen();
FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize);
- Assert.assertEquals(3, trailer.getMajorVersion());
- Assert.assertEquals(entryCount, trailer.getEntryCount());
+ assertEquals(3, trailer.getMajorVersion());
+ assertEquals(entryCount, trailer.getEntryCount());
HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)
.withIncludesMvcc(true).withIncludesTags(useTags).withDataBlockEncoding(dataBlockEncoding)
.withHBaseCheckSum(true).build();
@@ -180,7 +173,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com
hfile.initMetaAndIndex(reader);
if (findMidKey) {
Cell midkey = dataBlockIndexReader.midkey(reader);
- Assert.assertNotNull("Midkey should not be null", midkey);
+ assertNotNull(midkey, "Midkey should not be null");
}
// Meta index.
@@ -213,7 +206,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com
trailer.getLoadOnOpenDataOffset());
HFileBlock block =
blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader);
- Assert.assertEquals(BlockType.META, block.getBlockType());
+ assertEquals(BlockType.META, block.getBlockType());
Text t = new Text();
ByteBuff buf = block.getBufferWithoutHeader();
if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) {
@@ -223,7 +216,7 @@ private void writeDataAndReadFromHFile(Path hfilePath, Compression.Algorithm com
Text expectedText = (metaCounter == 0 ? new Text("Paris")
: metaCounter == 1 ? new Text("Moscow")
: new Text("Washington, D.C."));
- Assert.assertEquals(expectedText, t);
+ assertEquals(expectedText, t);
LOG.info("Read meta block data: " + t);
++metaCounter;
curBlockPos += block.getOnDiskSizeWithHeader();
@@ -244,7 +237,7 @@ private long scanBlocks(int entryCount, HFileContext context, List key
HFileBlockDecodingContext ctx = blockReader.getBlockDecodingContext();
HFileBlock block =
blockReader.readBlockData(curBlockPos, -1, false, false, true).unpack(context, blockReader);
- Assert.assertEquals(BlockType.ENCODED_DATA, block.getBlockType());
+ assertEquals(BlockType.ENCODED_DATA, block.getBlockType());
ByteBuff origBlock = block.getBufferReadOnly();
int pos = block.headerSize() + DataBlockEncoding.ID_SIZE;
origBlock.position(pos);
@@ -255,19 +248,19 @@ private long scanBlocks(int entryCount, HFileContext context, List key
seeker.setCurrentBuffer(buf);
Cell res = seeker.getCell();
KeyValue kv = keyValues.get(entriesRead);
- Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
+ assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
++entriesRead;
while (seeker.next()) {
res = seeker.getCell();
kv = keyValues.get(entriesRead);
- Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
+ assertEquals(0, CellComparatorImpl.COMPARATOR.compare(res, kv));
++entriesRead;
}
++blocksRead;
curBlockPos += block.getOnDiskSizeWithHeader();
}
LOG.info("Finished reading: entries={}, blocksRead = {}", entriesRead, blocksRead);
- Assert.assertEquals(entryCount, entriesRead);
+ assertEquals(entryCount, entriesRead);
return curBlockPos;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
index f212eeb6bb51..4696372ee2ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
@@ -17,21 +17,21 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Random;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -40,13 +40,11 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -56,33 +54,33 @@
* A kind of integration test at the intersection of {@link HFileBlock}, {@link CacheConfig}, and
* {@link LruBlockCache}.
*/
-@Category({ IOTests.class, SmallTests.class })
-@RunWith(Parameterized.class)
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: cacheOnWrite={0}")
public class TestLazyDataBlockDecompression {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestLazyDataBlockDecompression.class);
private static final Logger LOG = LoggerFactory.getLogger(TestLazyDataBlockDecompression.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final Random RNG = new Random(9713312); // Just a fixed seed.
private FileSystem fs;
- @Parameterized.Parameter(0)
- public boolean cacheOnWrite;
+ private final boolean cacheOnWrite;
- @Parameterized.Parameters
- public static Iterable<Object[]> data() {
- return Arrays.asList(new Object[][] { { false }, { true } });
+ public static Stream<Arguments> parameters() {
+ return Stream.of(Arguments.of(false), Arguments.of(true));
}
- @Before
+ public TestLazyDataBlockDecompression(boolean cacheOnWrite) {
+ this.cacheOnWrite = cacheOnWrite;
+ }
+
+ @BeforeEach
public void setUp() throws IOException {
fs = FileSystem.get(TEST_UTIL.getConfiguration());
}
- @After
+ @AfterEach
public void tearDown() {
fs = null;
}
@@ -134,7 +132,7 @@ private static void cacheBlocks(Configuration conf, CacheConfig cacheConfig, Fil
reader.close();
}
- @Test
+ @TestTemplate
public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
// enough room for 2 uncompressed block
int maxSize = (int) (HConstants.DEFAULT_BLOCKSIZE * 2.1);
@@ -156,11 +154,11 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
assertFalse(cc.isCombinedBlockCache());
LruBlockCache disabledBlockCache = (LruBlockCache) cc.getBlockCache().get();
LOG.info("disabledBlockCache=" + disabledBlockCache);
- assertEquals("test inconsistency detected.", maxSize, disabledBlockCache.getMaxSize());
- assertTrue("eviction thread spawned unintentionally.",
- disabledBlockCache.getEvictionThread() == null);
- assertEquals("freshly created blockcache contains blocks.", 0,
- disabledBlockCache.getBlockCount());
+ assertEquals(maxSize, disabledBlockCache.getMaxSize(), "test inconsistency detected.");
+ assertTrue(disabledBlockCache.getEvictionThread() == null,
+ "eviction thread spawned unintentionally.");
+ assertEquals(0, disabledBlockCache.getBlockCount(),
+ "freshly created blockcache contains blocks.");
// 2000 kv's is ~3.6 full unencoded data blocks.
// Requires a conf and CacheConfig but should not be specific to this instance's cache settings
@@ -169,13 +167,13 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
// populate the cache
cacheBlocks(lazyCompressDisabled, cc, fs, hfilePath, context);
long disabledBlockCount = disabledBlockCache.getBlockCount();
- assertTrue("blockcache should contain blocks. disabledBlockCount=" + disabledBlockCount,
- disabledBlockCount > 0);
+ assertTrue(disabledBlockCount > 0,
+ "blockcache should contain blocks. disabledBlockCount=" + disabledBlockCount);
long disabledEvictedCount = disabledBlockCache.getStats().getEvictedCount();
for (Map.Entry<BlockCacheKey, LruCachedBlock> e : disabledBlockCache.getMapForTests()
.entrySet()) {
HFileBlock block = (HFileBlock) e.getValue().getBuffer();
- assertTrue("found a packed block, block=" + block, block.isUnpacked());
+ assertTrue(block.isUnpacked(), "found a packed block, block=" + block);
}
// count blocks with lazy decompression
@@ -186,20 +184,20 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
lazyCompressEnabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);
cc = new CacheConfig(lazyCompressEnabled,
new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressEnabled));
- assertTrue("test improperly configured.", cc.shouldCacheDataCompressed());
+ assertTrue(cc.shouldCacheDataCompressed(), "test improperly configured.");
assertTrue(cc.getBlockCache().get() instanceof LruBlockCache);
LruBlockCache enabledBlockCache = (LruBlockCache) cc.getBlockCache().get();
LOG.info("enabledBlockCache=" + enabledBlockCache);
- assertEquals("test inconsistency detected", maxSize, enabledBlockCache.getMaxSize());
- assertTrue("eviction thread spawned unintentionally.",
- enabledBlockCache.getEvictionThread() == null);
- assertEquals("freshly created blockcache contains blocks.", 0,
- enabledBlockCache.getBlockCount());
+ assertEquals(maxSize, enabledBlockCache.getMaxSize(), "test inconsistency detected");
+ assertTrue(enabledBlockCache.getEvictionThread() == null,
+ "eviction thread spawned unintentionally.");
+ assertEquals(0, enabledBlockCache.getBlockCount(),
+ "freshly created blockcache contains blocks.");
cacheBlocks(lazyCompressEnabled, cc, fs, hfilePath, context);
long enabledBlockCount = enabledBlockCache.getBlockCount();
- assertTrue("blockcache should contain blocks. enabledBlockCount=" + enabledBlockCount,
- enabledBlockCount > 0);
+ assertTrue(enabledBlockCount > 0,
+ "blockcache should contain blocks. enabledBlockCount=" + enabledBlockCount);
long enabledEvictedCount = enabledBlockCache.getStats().getEvictedCount();
int candidatesFound = 0;
for (Map.Entry<BlockCacheKey, LruCachedBlock> e : enabledBlockCache.getMapForTests()
@@ -207,24 +205,24 @@ public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
candidatesFound++;
HFileBlock block = (HFileBlock) e.getValue().getBuffer();
if (cc.shouldCacheCompressed(block.getBlockType().getCategory())) {
- assertFalse("found an unpacked block, block=" + block + ", block buffer capacity="
- + block.getBufferWithoutHeader().capacity(), block.isUnpacked());
+ assertFalse(block.isUnpacked(), "found an unpacked block, block=" + block
+ + ", block buffer capacity=" + block.getBufferWithoutHeader().capacity());
}
}
- assertTrue("did not find any candidates for compressed caching. Invalid test.",
- candidatesFound > 0);
+ assertTrue(candidatesFound > 0,
+ "did not find any candidates for compressed caching. Invalid test.");
LOG.info(
"disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount);
- assertTrue(
+ assertTrue(disabledBlockCount < enabledBlockCount,
"enabling compressed data blocks should increase the effective cache size. "
- + "disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount,
- disabledBlockCount < enabledBlockCount);
+ + "disabledBlockCount=" + disabledBlockCount + ", enabledBlockCount=" + enabledBlockCount);
LOG.info("disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount="
+ enabledEvictedCount);
- assertTrue("enabling compressed data blocks should reduce the number of evictions. "
- + "disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount="
- + enabledEvictedCount, enabledEvictedCount < disabledEvictedCount);
+ assertTrue(enabledEvictedCount < disabledEvictedCount,
+ "enabling compressed data blocks should reduce the number of evictions. "
+ + "disabledEvictedCount=" + disabledEvictedCount + ", enabledEvictedCount="
+ + enabledEvictedCount);
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java
index 00f43fcced94..cb40a77965ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruAdaptiveBlockCache.java
@@ -18,10 +18,11 @@
package org.apache.hadoop.hbase.io.hfile;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.ByteBuffer;
import java.util.Random;
@@ -32,7 +33,6 @@
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Waiter;
@@ -43,10 +43,8 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -56,13 +54,10 @@
* Tests will ensure it grows and shrinks in size properly, evictions run when they're supposed to
* and do what they should, and that cached blocks are accessible when expected to be.
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestLruAdaptiveBlockCache {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestLruAdaptiveBlockCache.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestLruAdaptiveBlockCache.class);
private static final Configuration CONF = HBaseConfiguration.create();
@@ -73,7 +68,7 @@ public void testCacheEvictionThreadSafe() throws Exception {
int numBlocks = 9;
int testRuns = 10;
final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
- assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+ assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken.");
final LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
@@ -121,7 +116,7 @@ public void testBackgroundEvictionThread() throws Exception {
long maxSize = 100000;
int numBlocks = 9;
long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
- assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+ assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken.");
LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
@@ -162,7 +157,7 @@ public String explainFailure() throws Exception {
for (long prevCnt = 0 /* < number of blocks added */, curCnt = cache.getBlockCount(); prevCnt
!= curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) {
Thread.sleep(200);
- assertTrue("Cache never stabilized.", n++ < 100);
+ assertTrue(n++ < 100, "Cache never stabilized.");
}
long evictionCount = cache.getStats().getEvictionCount();
@@ -207,8 +202,8 @@ public void testCacheSimple() throws Exception {
for (CachedItem block : blocks) {
cache.cacheBlock(block.cacheKey, block);
}
- assertEquals("Cache should ignore cache requests for blocks already in cache",
- expectedBlockCount, cache.getBlockCount());
+ assertEquals(expectedBlockCount, cache.getBlockCount(),
+ "Cache should ignore cache requests for blocks already in cache");
// Verify correctly calculated cache heap size
assertEquals(expectedCacheSize, cache.heapSize());
@@ -978,9 +973,9 @@ static void testMultiThreadGetAndEvictBlockInternal(BlockCache cache) throws Exc
t1.join();
t2.join();
t3.join();
- Assert.assertFalse(err1.get());
- Assert.assertFalse(err2.get());
- Assert.assertFalse(err3.get());
+ assertFalse(err1.get());
+ assertFalse(err2.get());
+ assertFalse(err3.get());
}
@Test
@@ -1003,7 +998,7 @@ public void testSkipCacheDataBlocksInteral(int heavyEvictionCountLimit) throws E
long maxSize = 100000000;
int numBlocks = 100000;
final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
- assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+ assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken.");
final LruAdaptiveBlockCache cache = new LruAdaptiveBlockCache(maxSize, blockSize, true,
(int) Math.ceil(1.2 * maxSize / blockSize), LruAdaptiveBlockCache.DEFAULT_LOAD_FACTOR,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index 59cacc154a5e..e2248cb9e6fa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -18,10 +18,11 @@
package org.apache.hadoop.hbase.io.hfile;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.HEAP;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.ByteBuffer;
import java.util.Random;
@@ -32,7 +33,6 @@
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Waiter;
@@ -43,10 +43,8 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -56,13 +54,10 @@
* Tests will ensure it grows and shrinks in size properly, evictions run when they're supposed to
* and do what they should, and that cached blocks are accessible when expected to be.
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestLruBlockCache {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestLruBlockCache.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestLruBlockCache.class);
private static final Configuration CONF = HBaseConfiguration.create();
@@ -73,7 +68,7 @@ public void testCacheEvictionThreadSafe() throws Exception {
int numBlocks = 9;
int testRuns = 10;
final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
- assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+ assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken.");
final LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
@@ -124,7 +119,7 @@ public void testBackgroundEvictionThread() throws Exception {
long maxSize = 100000;
int numBlocks = 9;
long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
- assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+ assertTrue(blockSize * numBlocks <= maxSize, "calculateBlockSize appears broken.");
LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
@@ -165,7 +160,7 @@ public String explainFailure() throws Exception {
for (long prevCnt = 0 /* < number of blocks added */, curCnt = cache.getBlockCount(); prevCnt
!= curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) {
Thread.sleep(200);
- assertTrue("Cache never stabilized.", n++ < 100);
+ assertTrue(n++ < 100, "Cache never stabilized.");
}
long evictionCount = cache.getStats().getEvictionCount();
@@ -210,8 +205,8 @@ public void testCacheSimple() throws Exception {
for (CachedItem block : blocks) {
cache.cacheBlock(block.cacheKey, block);
}
- assertEquals("Cache should ignore cache requests for blocks already in cache",
- expectedBlockCount, cache.getBlockCount());
+ assertEquals(expectedBlockCount, cache.getBlockCount(),
+ "Cache should ignore cache requests for blocks already in cache");
// Verify correctly calculated cache heap size
assertEquals(expectedCacheSize, cache.heapSize());
@@ -980,9 +975,9 @@ static void testMultiThreadGetAndEvictBlockInternal(BlockCache cache) throws Exc
t1.join();
t2.join();
t3.join();
- Assert.assertFalse(err1.get());
- Assert.assertFalse(err2.get());
- Assert.assertFalse(err3.get());
+ assertFalse(err1.get());
+ assertFalse(err2.get());
+ assertFalse(err3.get());
}
@Test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
index eb57b0acd652..ff7e495142e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
@@ -17,30 +17,25 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestLruCachedBlock {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestLruCachedBlock.class);
-
LruCachedBlock block;
LruCachedBlock blockEqual;
LruCachedBlock blockNotEqual;
- @Before
+ @BeforeEach
public void setUp() throws Exception {
BlockCacheKey cacheKey = new BlockCacheKey("name", 0);
BlockCacheKey otherKey = new BlockCacheKey("name2", 1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
index 8facb1f4b5be..91a4b3369a88 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
@@ -29,12 +29,12 @@
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.not;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
-import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
+import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension;
import io.opentelemetry.sdk.trace.data.SpanData;
import java.io.IOException;
import java.util.List;
@@ -48,7 +48,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
@@ -81,21 +80,18 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestPrefetch {
- private static final Logger LOG = LoggerFactory.getLogger(TestPrefetch.class);
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestPrefetch.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestPrefetch.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -107,10 +103,10 @@ public class TestPrefetch {
private FileSystem fs;
private BlockCache blockCache;
- @Rule
- public OpenTelemetryRule otelRule = OpenTelemetryRule.create();
+ @RegisterExtension
+ static final OpenTelemetryExtension OTEL_EXT = OpenTelemetryExtension.create();
- @Before
+ @BeforeEach
public void setUp() throws IOException, InterruptedException {
conf = TEST_UTIL.getConfiguration();
conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
@@ -170,9 +166,9 @@ public void testPrefetch() throws Exception {
readStoreFile(storeFile);
}, "testPrefetch");
- TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<>(otelRule::getSpans,
+ TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new MatcherPredicate<>(OTEL_EXT::getSpans,
hasItems(hasName("testPrefetch"), hasName("PrefetchExecutor.request"))));
- final List spans = otelRule.getSpans();
+ final List spans = OTEL_EXT.getSpans();
if (LOG.isDebugEnabled()) {
StringTraceRenderer renderer = new StringTraceRenderer(spans);
renderer.render(LOG::debug);
@@ -329,14 +325,13 @@ public void testPrefetchWithDelay() throws Exception {
// Wait for 20 seconds, no thread should start prefetch
Thread.sleep(20000);
- assertFalse("Prefetch threads should not be running at this point", reader.prefetchStarted());
- long timeout = 10000;
+ assertFalse(reader.prefetchStarted(), "Prefetch threads should not be running at this point");
Waiter.waitFor(conf, 10000, () -> (reader.prefetchStarted() || reader.prefetchComplete()));
assertTrue(reader.prefetchStarted() || reader.prefetchComplete());
- assertTrue("Prefetch should start post configured delay",
- getElapsedTime(startTime) > PrefetchExecutor.getPrefetchDelay());
+ assertTrue(getElapsedTime(startTime) > PrefetchExecutor.getPrefetchDelay(),
+ "Prefetch should start post configured delay");
conf.setInt(PREFETCH_DELAY, 1000);
conf.setFloat(PREFETCH_DELAY_VARIATION, PREFETCH_DELAY_VARIATION_DEFAULT_VALUE);
@@ -514,7 +509,7 @@ private Pair writeStoreFileForSplit(Path storeDir, HFileContext co
}
}
sfw.close();
- return new Pair(sfw.getPath(), splitPoint);
+ return new Pair<>(sfw.getPath(), splitPoint);
}
public static KeyValue.Type generateKeyType(Random rand) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java
index 7ca5e34e6db5..1e5da4ba8d04 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchRSClose.java
@@ -18,14 +18,13 @@
package org.apache.hadoop.hbase.io.hfile;
import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.StartMiniClusterOption;
@@ -42,21 +41,17 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ IOTests.class, LargeTests.class })
+@Tag(IOTests.TAG)
+@Tag(LargeTests.TAG)
public class TestPrefetchRSClose {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestPrefetchRSClose.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchRSClose.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -67,7 +62,7 @@ public class TestPrefetchRSClose {
MiniHBaseCluster cluster;
StartMiniClusterOption option = StartMiniClusterOption.builder().numRegionServers(1).build();
- @Before
+ @BeforeEach
public void setup() throws Exception {
conf = TEST_UTIL.getConfiguration();
testDir = TEST_UTIL.getDataTestDir();
@@ -130,7 +125,7 @@ public void testPrefetchPersistence() throws Exception {
assertTrue(new File(testDir + "/bucket.persistence").exists());
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.cleanupDataTestDirOnTestFS(String.valueOf(testDir));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java
index 688802c28e25..3845e4649257 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetchWithBucketCache.java
@@ -20,11 +20,11 @@
import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;
import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
import static org.apache.hadoop.hbase.io.hfile.BlockCacheFactory.BUCKET_CACHE_BUCKETS_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.File;
import java.io.IOException;
@@ -37,7 +37,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
@@ -64,30 +63,22 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestPrefetchWithBucketCache {
private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchWithBucketCache.class);
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestPrefetchWithBucketCache.class);
-
- @Rule
- public TestName name = new TestName();
-
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
@@ -97,20 +88,20 @@ public class TestPrefetchWithBucketCache {
private FileSystem fs;
private BlockCache blockCache;
- @Before
- public void setUp() throws IOException {
+ @BeforeEach
+ public void setUp(TestInfo testInfo) throws IOException {
conf = TEST_UTIL.getConfiguration();
conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
fs = HFileSystem.get(conf);
- File testDir = new File(name.getMethodName());
+ File testDir = new File(testInfo.getTestMethod().get().getName());
testDir.mkdir();
conf.set(BUCKET_CACHE_IOENGINE_KEY, "file:/" + testDir.getAbsolutePath() + "/bucket.cache");
}
- @After
- public void tearDown() {
- File cacheFile = new File(name.getMethodName() + "/bucket.cache");
- File dir = new File(name.getMethodName());
+ @AfterEach
+ public void tearDown(TestInfo testInfo) {
+ File cacheFile = new File(testInfo.getTestMethod().get().getName() + "/bucket.cache");
+ File dir = new File(testInfo.getTestMethod().get().getName());
cacheFile.delete();
dir.delete();
}
@@ -283,15 +274,15 @@ public void testPrefetchMetricProgress() throws Exception {
}
@Test
- public void testPrefetchMetricProgressForLinks() throws Exception {
+ public void testPrefetchMetricProgressForLinks(TestInfo testInfo) throws Exception {
conf.setLong(BUCKET_CACHE_SIZE_KEY, 200);
blockCache = BlockCacheFactory.createBlockCache(conf);
cacheConf = new CacheConfig(conf, blockCache);
- final RegionInfo hri =
- RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
+ final RegionInfo hri = RegionInfoBuilder
+ .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build();
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
Configuration testConf = new Configuration(this.conf);
- Path testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
+ Path testDir = TEST_UTIL.getDataTestDir(testInfo.getTestMethod().get().getName());
CommonFSUtils.setRootDir(testConf, testDir);
Path tableDir = CommonFSUtils.getTableDir(testDir, hri.getTable());
RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf(tableDir.getName())).build();
@@ -308,8 +299,8 @@ public void testPrefetchMetricProgressForLinks() throws Exception {
Waiter.waitFor(testConf, 300, () -> bc.getBackingMap().size() == 6);
long cachedSize = bc.getRegionCachedInfo().get().get(region.getEncodedName());
- final RegionInfo dstHri =
- RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
+ final RegionInfo dstHri = RegionInfoBuilder
+ .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build();
HRegionFileSystem dstRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
CommonFSUtils.getTableDir(testDir, dstHri.getTable()), dstHri);
@@ -340,18 +331,18 @@ public void testPrefetchMetricProgressForLinks() throws Exception {
}
@Test
- public void testPrefetchMetricProgressForLinksToArchived() throws Exception {
+ public void testPrefetchMetricProgressForLinksToArchived(TestInfo testInfo) throws Exception {
conf.setLong(BUCKET_CACHE_SIZE_KEY, 200);
blockCache = BlockCacheFactory.createBlockCache(conf);
cacheConf = new CacheConfig(conf, blockCache);
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
Configuration testConf = new Configuration(this.conf);
- Path testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
+ Path testDir = TEST_UTIL.getDataTestDir(testInfo.getTestMethod().get().getName());
CommonFSUtils.setRootDir(testConf, testDir);
- final RegionInfo hri =
- RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
+ final RegionInfo hri = RegionInfoBuilder
+ .newBuilder(TableName.valueOf(testInfo.getTestMethod().get().getName())).build();
Path tableDir = CommonFSUtils.getTableDir(testDir, hri.getTable());
RegionInfo region = RegionInfoBuilder.newBuilder(TableName.valueOf(tableDir.getName())).build();
Path regionDir = new Path(tableDir, region.getEncodedName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
index 3089fe36901b..ac6cd15b282f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
@@ -25,7 +25,6 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -33,20 +32,15 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Test;
/**
* Test {@link HFileScanner#reseekTo(org.apache.hadoop.hbase.Cell)}
*/
-@Category({ IOTests.class, SmallTests.class })
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(SmallTests.TAG)
public class TestReseekTo {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestReseekTo.class);
-
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@Test
@@ -128,7 +122,7 @@ private void testReseekToInternals(TagUsage tagUsage) throws IOException {
long start = System.nanoTime();
scanner.reseekTo(new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"),
Bytes.toBytes("qual"), Bytes.toBytes(value)));
- assertEquals("i is " + i, value, scanner.getValueString());
+ assertEquals(value, scanner.getValueString(), "i is " + i);
}
reader.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java
index 8653c4d416d5..5cd6f635aaf5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1DataEncoder.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.io.hfile;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -26,23 +28,18 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparatorImpl;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestRowIndexV1DataEncoder {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRowIndexV1DataEncoder.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -50,7 +47,7 @@ public class TestRowIndexV1DataEncoder {
private FileSystem fs;
private DataBlockEncoding dataBlockEncoding;
- @Before
+ @BeforeEach
public void setUp() throws IOException {
conf = TEST_UTIL.getConfiguration();
fs = FileSystem.get(conf);
@@ -89,7 +86,7 @@ private void writeDataToHFile(Path hfilePath, int entryCount) throws IOException
// Without the patch it would have produced 244 blocks (each block of 1236 bytes)
// Earlier this would create blocks ~20% greater than the block size of 1024 bytes
// After this patch actual block size is ~2% greater than the block size of 1024 bytes
- Assert.assertEquals(278, trailer.getDataIndexCount());
+ assertEquals(278, trailer.getDataIndexCount());
}
private void writeKeyValues(int entryCount, HFile.Writer writer, List keyValues)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java
index 2004e20aad6c..44ff11cd7c81 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestRowIndexV1RoundTrip.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.io.hfile;
import static org.apache.hadoop.hbase.io.ByteBuffAllocator.MIN_ALLOCATE_SIZE_KEY;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -31,7 +31,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SizeCachedNoTagsByteBufferKeyValue;
@@ -41,16 +40,14 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestRowIndexV1RoundTrip {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRowIndexV1RoundTrip.class);
+
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final DataBlockEncoding DATA_BLOCK_ENCODING = DataBlockEncoding.ROW_INDEX_V1;
private static final int ENTRY_COUNT = 100;
@@ -58,7 +55,7 @@ public class TestRowIndexV1RoundTrip {
private Configuration conf;
private FileSystem fs;
- @Before
+ @BeforeEach
public void setUp() throws IOException {
conf = TEST_UTIL.getConfiguration();
conf.setLong(MIN_ALLOCATE_SIZE_KEY, 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
index a5389c75a8c0..33a93e7085fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
@@ -27,7 +27,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -47,25 +46,18 @@
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
-import org.junit.After;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ RegionServerTests.class, SmallTests.class })
+@Tag(RegionServerTests.TAG)
+@Tag(SmallTests.TAG)
public class TestScannerFromBucketCache {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestScannerFromBucketCache.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestScannerFromBucketCache.class);
- @Rule
- public TestName name = new TestName();
HRegion region = null;
private HBaseTestingUtility test_util;
@@ -76,8 +68,8 @@ public class TestScannerFromBucketCache {
// Test names
private TableName tableName;
- private void setUp(boolean useBucketCache) throws IOException {
- test_util = HBaseTestingUtility.createLocalHTU();
+ private void setUp(boolean useBucketCache, TestInfo testInfo) throws IOException {
+ test_util = new HBaseTestingUtility();
conf = test_util.getConfiguration();
if (useBucketCache) {
conf.setInt("hbase.bucketcache.size", 400);
@@ -86,23 +78,23 @@ private void setUp(boolean useBucketCache) throws IOException {
conf.setFloat("hfile.block.cache.size", 0.2f);
conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);
}
- tableName = TableName.valueOf(name.getMethodName());
+ tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
EnvironmentEdgeManagerTestHelper.reset();
LOG.info("Cleaning test directory: " + test_util.getDataTestDir());
test_util.cleanupTestDir();
}
- String getName() {
- return name.getMethodName();
+ String getName(TestInfo testInfo) {
+ return testInfo.getTestMethod().get().getName();
}
@Test
- public void testBasicScanWithLRUCache() throws IOException {
- setUp(false);
+ public void testBasicScanWithLRUCache(TestInfo testInfo) throws IOException {
+ setUp(false, testInfo);
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
@@ -113,7 +105,7 @@ public void testBasicScanWithLRUCache() throws IOException {
long ts3 = ts1 + 2;
// Setting up region
- String method = this.getName();
+ String method = this.getName(testInfo);
this.region = initHRegion(tableName, method, conf, test_util, fam1);
try {
List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);
@@ -139,8 +131,8 @@ public void testBasicScanWithLRUCache() throws IOException {
}
@Test
- public void testBasicScanWithOffheapBucketCache() throws IOException {
- setUp(true);
+ public void testBasicScanWithOffheapBucketCache(TestInfo testInfo) throws IOException {
+ setUp(true, testInfo);
byte[] row1 = Bytes.toBytes("row1offheap");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
@@ -151,7 +143,7 @@ public void testBasicScanWithOffheapBucketCache() throws IOException {
long ts3 = ts1 + 2;
// Setting up region
- String method = this.getName();
+ String method = this.getName(testInfo);
this.region = initHRegion(tableName, method, conf, test_util, fam1);
try {
List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);
@@ -180,8 +172,8 @@ public void testBasicScanWithOffheapBucketCache() throws IOException {
}
@Test
- public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {
- setUp(true);
+ public void testBasicScanWithOffheapBucketCacheWithMBB(TestInfo testInfo) throws IOException {
+ setUp(true, testInfo);
byte[] row1 = Bytes.toBytes("row1offheap");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
@@ -192,7 +184,7 @@ public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {
long ts3 = ts1 + 2;
// Setting up region
- String method = this.getName();
+ String method = this.getName(testInfo);
this.region = initHRegion(tableName, method, conf, test_util, fam1);
try {
List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index fa3ecf7a8761..f9b0b2224cb3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -42,26 +42,19 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
/**
* Test the optimization that does not scan files where all key ranges are excluded.
*/
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, SmallTests.class })
+@HBaseParameterizedTestTemplate(name = "{0}")
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(SmallTests.TAG)
public class TestScannerSelectionUsingKeyRange {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestScannerSelectionUsingKeyRange.class);
-
- private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
+ private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static TableName TABLE = TableName.valueOf("myTable");
private static String FAMILY = "myCF";
private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
@@ -78,13 +71,12 @@ public class TestScannerSelectionUsingKeyRange {
private BloomType bloomType;
private int expectedCount;
- @Parameters
- public static Collection parameters() {
- List params = new ArrayList<>();
+ public static Stream parameters() {
+ List params = new ArrayList<>();
for (Object type : TYPE_COUNT.keySet()) {
- params.add(new Object[] { type, TYPE_COUNT.get(type) });
+ params.add(Arguments.of(type, TYPE_COUNT.get(type)));
}
- return params;
+ return params.stream();
}
public TestScannerSelectionUsingKeyRange(Object type, Object count) {
@@ -92,12 +84,12 @@ public TestScannerSelectionUsingKeyRange(Object type, Object count) {
expectedCount = (Integer) count;
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.cleanupTestDir();
}
- @Test
+ @TestTemplate
public void testScannerSelection() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
conf.setInt("hbase.hstore.compactionThreshold", 10000);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 6a9667bd8912..612910e21b78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -17,16 +17,16 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Set;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -43,26 +43,20 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test the optimization that does not scan files where all timestamps are expired.
*/
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, LargeTests.class })
+@HBaseParameterizedTestTemplate(name = "numFreshFiles={0}")
+@Tag(IOTests.TAG)
+@Tag(LargeTests.TAG)
public class TestScannerSelectionUsingTTL {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestScannerSelectionUsingTTL.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestScannerSelectionUsingTTL.class);
private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
@@ -79,13 +73,12 @@ public class TestScannerSelectionUsingTTL {
public final int numFreshFiles, totalNumFiles;
- @Parameters
- public static Collection parameters() {
- List params = new ArrayList<>();
+ public static Stream parameters() {
+ List params = new ArrayList<>();
for (int numFreshFiles = 1; numFreshFiles <= 3; ++numFreshFiles) {
- params.add(new Object[] { numFreshFiles });
+ params.add(Arguments.of(numFreshFiles));
}
- return params;
+ return params.stream();
}
public TestScannerSelectionUsingTTL(int numFreshFiles) {
@@ -93,7 +86,7 @@ public TestScannerSelectionUsingTTL(int numFreshFiles) {
this.totalNumFiles = numFreshFiles + NUM_EXPIRED_FILES;
}
- @Test
+ @TestTemplate
public void testScannerSelection() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
conf.setBoolean("hbase.store.delete.expired.storefile", false);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
index 6fe90105f816..13153f6d0534 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Random;
@@ -27,7 +27,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -39,19 +38,15 @@
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.BloomFilterUtil;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestSeekBeforeWithInlineBlocks {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSeekBeforeWithInlineBlocks.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestSeekBeforeWithInlineBlocks.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -164,19 +159,19 @@ public void testMultiIndexLevelRandomHFileWithBlooms() throws IOException {
}
private void checkSeekBefore(Cell[] cells, HFileScanner scanner, int i) throws IOException {
- assertEquals(
- "Failed to seek to the key before #" + i + " (" + CellUtil.getCellKeyAsString(cells[i]) + ")",
- true, scanner.seekBefore(cells[i]));
+ assertEquals(true, scanner.seekBefore(cells[i]), "Failed to seek to the key before #" + i + " ("
+ + CellUtil.getCellKeyAsString(cells[i]) + ")");
}
private void checkNoSeekBefore(Cell[] cells, HFileScanner scanner, int i) throws IOException {
- assertEquals("Incorrectly succeeded in seeking to before first key ("
- + CellUtil.getCellKeyAsString(cells[i]) + ")", false, scanner.seekBefore(cells[i]));
+ assertEquals(false, scanner.seekBefore(cells[i]),
+ "Incorrectly succeeded in seeking to before first key ("
+ + CellUtil.getCellKeyAsString(cells[i]) + ")");
}
/** Check a key/value pair after it was read by the reader */
private void checkCell(Cell expected, Cell actual) {
- assertTrue(String.format("Expected key %s, but was %s", CellUtil.getCellKeyAsString(expected),
- CellUtil.getCellKeyAsString(actual)), CellUtil.equals(expected, actual));
+ assertTrue(CellUtil.equals(expected, actual), String.format("Expected key %s, but was %s",
+ CellUtil.getCellKeyAsString(expected), CellUtil.getCellKeyAsString(actual)));
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index 8ddbd95679e5..9eddea90781b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -17,15 +17,15 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Iterator;
import java.util.List;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -33,7 +33,7 @@
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -43,34 +43,31 @@
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test {@link HFileScanner#seekTo(Cell)} and its variants.
*/
-@Category({ IOTests.class, SmallTests.class })
-@RunWith(Parameterized.class)
+@org.junit.jupiter.api.Tag(IOTests.TAG)
+@org.junit.jupiter.api.Tag(SmallTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: encoding={0}")
public class TestSeekTo {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSeekTo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSeekTo.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final DataBlockEncoding encoding;
- @Parameters
- public static Collection parameters() {
- List paramList = new ArrayList<>();
+ public static Stream parameters() {
+ List paramList = new ArrayList<>();
for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
- paramList.add(new Object[] { encoding });
+ paramList.add(Arguments.of(encoding));
}
- return paramList;
+ return paramList.stream();
}
static boolean switchKVs = false;
@@ -79,7 +76,7 @@ public TestSeekTo(DataBlockEncoding encoding) {
this.encoding = encoding;
}
- @Before
+ @BeforeEach
public void setUp() {
// reset
switchKVs = false;
@@ -137,7 +134,7 @@ Path makeNewFile(TagUsage tagUsage) throws IOException {
return ncTFile;
}
- @Test
+ @TestTemplate
public void testSeekBefore() throws Exception {
testSeekBeforeInternals(TagUsage.NO_TAG);
testSeekBeforeInternals(TagUsage.ONLY_TAG);
@@ -195,7 +192,7 @@ protected void deleteTestDir(FileSystem fs) throws IOException {
}
}
- @Test
+ @TestTemplate
public void testSeekBeforeWithReSeekTo() throws Exception {
testSeekBeforeWithReSeekToInternals(TagUsage.NO_TAG);
testSeekBeforeWithReSeekToInternals(TagUsage.ONLY_TAG);
@@ -288,7 +285,7 @@ protected void testSeekBeforeWithReSeekToInternals(TagUsage tagUsage) throws IOE
deleteTestDir(fs);
}
- @Test
+ @TestTemplate
public void testSeekTo() throws Exception {
testSeekToInternals(TagUsage.NO_TAG);
testSeekToInternals(TagUsage.ONLY_TAG);
@@ -320,7 +317,7 @@ protected void testSeekToInternals(TagUsage tagUsage) throws IOException {
deleteTestDir(fs);
}
- @Test
+ @TestTemplate
public void testBlockContainingKey() throws Exception {
testBlockContainingKeyInternals(TagUsage.NO_TAG);
testBlockContainingKeyInternals(TagUsage.ONLY_TAG);
@@ -333,7 +330,7 @@ protected void testBlockContainingKeyInternals(TagUsage tagUsage) throws IOExcep
Configuration conf = TEST_UTIL.getConfiguration();
HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf);
HFileBlockIndex.BlockIndexReader blockIndexReader = reader.getDataBlockIndexReader();
- System.out.println(blockIndexReader.toString());
+ LOG.info(blockIndexReader.toString());
// falls before the start of the file.
assertEquals(-1, blockIndexReader.rootBlockContainingKey(toKV("a", tagUsage)));
assertEquals(0, blockIndexReader.rootBlockContainingKey(toKV("c", tagUsage)));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java
index 3c6b4647ef51..81a45b55bcd8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestTinyLfuBlockCache.java
@@ -17,33 +17,28 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.ByteBuffer;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ClassSize;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Tests the concurrent TinyLfuBlockCache.
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestTinyLfuBlockCache {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestTinyLfuBlockCache.class);
-
@Test
public void testCacheSimple() throws Exception {
@@ -82,8 +77,8 @@ public void testCacheSimple() throws Exception {
for (CachedItem block : blocks) {
cache.cacheBlock(block.cacheKey, block);
}
- assertEquals("Cache should ignore cache requests for blocks already in cache",
- expectedBlockCount, cache.getBlockCount());
+ assertEquals(expectedBlockCount, cache.getBlockCount(),
+ "Cache should ignore cache requests for blocks already in cache");
// Verify correctly calculated cache heap size
assertEquals(expectedCacheSize, cache.heapSize());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 61959e85ee87..b41185ac50d7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -30,12 +30,13 @@
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.MULTI_FACTOR_CONFIG_NAME;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.QUEUE_ADDITION_WAIT_TIME;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.SINGLE_FACTOR_CONFIG_NAME;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -53,10 +54,11 @@
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Waiter;
@@ -84,14 +86,11 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -103,31 +102,29 @@
*
* Tests will ensure that blocks' data correctness under several threads concurrency
*/
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, LargeTests.class })
+@Tag(IOTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}")
public class TestBucketCache {
private static final Logger LOG = LoggerFactory.getLogger(TestBucketCache.class);
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBucketCache.class);
-
- @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}")
- public static Iterable data() {
- return Arrays.asList(new Object[][] { { 8192, null }, // TODO: why is 8k the default blocksize
- // for these tests?
- { 16 * 1024,
+ public static Stream parameters() {
+ // TODO: why is 8k the default blocksize for these tests?
+ return Stream.of(Arguments.of(8192, null),
+ Arguments.of(16 * 1024,
new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024,
28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024,
- 128 * 1024 + 1024 } } });
+ 128 * 1024 + 1024 }));
}
- @Parameterized.Parameter(0)
- public int constructedBlockSize;
+ private final int constructedBlockSize;
+ private final int[] constructedBlockSizes;
- @Parameterized.Parameter(1)
- public int[] constructedBlockSizes;
+ public TestBucketCache(int constructedBlockSize, int[] constructedBlockSizes) {
+ this.constructedBlockSize = constructedBlockSize;
+ this.constructedBlockSizes = constructedBlockSizes;
+ }
BucketCache cache;
final int CACHE_SIZE = 1000000;
@@ -162,13 +159,13 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
}
}
- @Before
+ @BeforeEach
public void setup() throws IOException {
cache = new MockedBucketCache(ioEngineName, capacitySize, constructedBlockSize,
constructedBlockSizes, writeThreads, writerQLen, null);
}
- @After
+ @AfterEach
public void tearDown() {
cache.shutdown();
}
@@ -191,7 +188,7 @@ private static T randFrom(List a) {
return a.get(ThreadLocalRandom.current().nextInt(a.size()));
}
- @Test
+ @TestTemplate
public void testBucketAllocator() throws BucketAllocatorException {
BucketAllocator mAllocator = cache.getAllocator();
/*
@@ -218,7 +215,7 @@ public void testBucketAllocator() throws BucketAllocatorException {
for (Integer blockSize : BLOCKSIZES) {
BucketSizeInfo bucketSizeInfo = mAllocator.roundUpToBucketSizeInfo(blockSize);
IndexStatistics indexStatistics = bucketSizeInfo.statistics();
- assertEquals("unexpected freeCount for " + bucketSizeInfo, 0, indexStatistics.freeCount());
+ assertEquals(0, indexStatistics.freeCount(), "unexpected freeCount for " + bucketSizeInfo);
// we know the block sizes above are multiples of 1024, but default bucket sizes give an
// additional 1024 on top of that so this counts towards fragmentation in our test
@@ -236,17 +233,17 @@ public void testBucketAllocator() throws BucketAllocatorException {
assertEquals(0, mAllocator.getUsedSize());
}
- @Test
+ @TestTemplate
public void testCacheSimple() throws Exception {
CacheTestUtils.testCacheSimple(cache, BLOCK_SIZE, NUM_QUERIES);
}
- @Test
+ @TestTemplate
public void testCacheMultiThreadedSingleKey() throws Exception {
CacheTestUtils.hammerSingleKey(cache, 2 * NUM_THREADS, 2 * NUM_QUERIES);
}
- @Test
+ @TestTemplate
public void testHeapSizeChanges() throws Exception {
cache.stopWriterThreads();
CacheTestUtils.testHeapSizeChanges(cache, BLOCK_SIZE);
@@ -273,7 +270,7 @@ private void cacheAndWaitUntilFlushedToBucket(BucketCache cache, BlockCacheKey c
waitUntilFlushedToBucket(cache, cacheKey);
}
- @Test
+ @TestTemplate
public void testMemoryLeak() throws Exception {
final BlockCacheKey cacheKey = new BlockCacheKey("dummy", 1L);
cacheAndWaitUntilFlushedToBucket(cache, cacheKey,
@@ -315,10 +312,10 @@ public void run() {
*/
assertEquals(1L, cache.getBlockCount());
assertTrue(cache.getCurrentSize() > 0L);
- assertTrue("We should have a block!", cache.iterator().hasNext());
+ assertTrue(cache.iterator().hasNext(), "We should have a block!");
}
- @Test
+ @TestTemplate
public void testRetrieveFromFile() throws Exception {
Path testDir = createAndGetTestDir();
String ioEngineName = "file:" + testDir + "/bucket.cache";
@@ -339,14 +336,14 @@ public void testRetrieveFromFile() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testRetrieveFromMMap() throws Exception {
final Path testDir = createAndGetTestDir();
final String ioEngineName = "mmap:" + testDir + "/bucket.cache";
testRetrievalUtils(testDir, ioEngineName);
}
- @Test
+ @TestTemplate
public void testRetrieveFromPMem() throws Exception {
final Path testDir = createAndGetTestDir();
final String ioEngineName = "pmem:" + testDir + "/bucket.cache";
@@ -403,20 +400,20 @@ private void testRetrievalUtils(Path testDir, String ioEngineName)
assertTrue(new File(persistencePath).exists());
}
- @Test
+ @TestTemplate
public void testRetrieveUnsupportedIOE() throws Exception {
try {
final Path testDir = createAndGetTestDir();
final String ioEngineName = testDir + "/bucket.cache";
testRetrievalUtils(testDir, ioEngineName);
- Assert.fail("Should have thrown IllegalArgumentException because of unsupported IOEngine!!");
+ fail("Should have thrown IllegalArgumentException because of unsupported IOEngine!!");
} catch (IllegalArgumentException e) {
- Assert.assertEquals("Don't understand io engine name for cache- prefix with file:, "
+ assertEquals("Don't understand io engine name for cache- prefix with file:, "
+ "files:, mmap: or offheap", e.getMessage());
}
}
- @Test
+ @TestTemplate
public void testRetrieveFromMultipleFiles() throws Exception {
final Path testDirInitial = createAndGetTestDir();
final Path newTestDir = new HBaseTestingUtility().getDataTestDir();
@@ -441,7 +438,7 @@ public void testRetrieveFromMultipleFiles() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testRetrieveFromFileWithoutPersistence() throws Exception {
BucketCache bucketCache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize,
constructedBlockSizes, writeThreads, writerQLen, null);
@@ -472,7 +469,7 @@ public void testRetrieveFromFileWithoutPersistence() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testBucketAllocatorLargeBuckets() throws BucketAllocatorException {
long availableSpace = 20 * 1024L * 1024 * 1024;
int[] bucketSizes = new int[] { 1024, 1024 * 1024, 1024 * 1024 * 1024 };
@@ -480,7 +477,7 @@ public void testBucketAllocatorLargeBuckets() throws BucketAllocatorException {
assertTrue(allocator.getBuckets().length > 0);
}
- @Test
+ @TestTemplate
public void testGetPartitionSize() throws IOException {
// Test default values
validateGetPartitionSize(cache, DEFAULT_SINGLE_FACTOR, DEFAULT_MIN_FACTOR);
@@ -500,7 +497,7 @@ public void testGetPartitionSize() throws IOException {
validateGetPartitionSize(cache, 0.2f, 0.5f);
}
- @Test
+ @TestTemplate
public void testCacheSizeCapacity() throws IOException {
// Test cache capacity (capacity / blockSize) < Integer.MAX_VALUE
validateGetPartitionSize(cache, DEFAULT_SINGLE_FACTOR, DEFAULT_MIN_FACTOR);
@@ -512,13 +509,13 @@ public void testCacheSizeCapacity() throws IOException {
try {
new BucketCache(ioEngineName, Long.MAX_VALUE, 1, constructedBlockSizes, writeThreads,
writerQLen, null, 100, conf);
- Assert.fail("Should have thrown IllegalArgumentException because of large cache capacity!");
+ fail("Should have thrown IllegalArgumentException because of large cache capacity!");
} catch (IllegalArgumentException e) {
- Assert.assertEquals("Cache capacity is too large, only support 32TB now", e.getMessage());
+ assertEquals("Cache capacity is too large, only support 32TB now", e.getMessage());
}
}
- @Test
+ @TestTemplate
public void testValidBucketCacheConfigs() throws IOException {
Configuration conf = HBaseConfiguration.create();
conf.setFloat(ACCEPT_FACTOR_CONFIG_NAME, 0.9f);
@@ -532,20 +529,20 @@ public void testValidBucketCacheConfigs() throws IOException {
constructedBlockSizes, writeThreads, writerQLen, null, 100, conf);
assertTrue(cache.waitForCacheInitialization(10000));
- assertEquals(ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", 0.9f,
- cache.getAcceptableFactor(), 0);
- assertEquals(MIN_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f, cache.getMinFactor(), 0);
- assertEquals(EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f,
- cache.getExtraFreeFactor(), 0);
- assertEquals(SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.1f, cache.getSingleFactor(),
- 0);
- assertEquals(MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", 0.7f, cache.getMultiFactor(),
- 0);
- assertEquals(MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", 0.2f, cache.getMemoryFactor(),
- 0);
+ assertEquals(0.9f, cache.getAcceptableFactor(), 0,
+ ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.");
+ assertEquals(0.5f, cache.getMinFactor(), 0, MIN_FACTOR_CONFIG_NAME + " failed to propagate.");
+ assertEquals(0.5f, cache.getExtraFreeFactor(), 0,
+ EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.");
+ assertEquals(0.1f, cache.getSingleFactor(), 0,
+ SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.");
+ assertEquals(0.7f, cache.getMultiFactor(), 0,
+ MULTI_FACTOR_CONFIG_NAME + " failed to propagate.");
+ assertEquals(0.2f, cache.getMemoryFactor(), 0,
+ MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.");
}
- @Test
+ @TestTemplate
public void testInvalidAcceptFactorConfig() throws IOException {
float[] configValues = { -1f, 0.2f, 0.86f, 1.05f };
boolean[] expectedOutcomes = { false, false, true, false };
@@ -554,7 +551,7 @@ public void testInvalidAcceptFactorConfig() throws IOException {
checkConfigValues(conf, configMappings, expectedOutcomes);
}
- @Test
+ @TestTemplate
public void testInvalidMinFactorConfig() throws IOException {
float[] configValues = { -1f, 0f, 0.96f, 1.05f };
// throws due to <0, in expected range, minFactor > acceptableFactor, > 1.0
@@ -564,7 +561,7 @@ public void testInvalidMinFactorConfig() throws IOException {
checkConfigValues(conf, configMappings, expectedOutcomes);
}
- @Test
+ @TestTemplate
public void testInvalidExtraFreeFactorConfig() throws IOException {
float[] configValues = { -1f, 0f, 0.2f, 1.05f };
// throws due to <0, in expected range, in expected range, config can be > 1.0
@@ -575,7 +572,7 @@ public void testInvalidExtraFreeFactorConfig() throws IOException {
checkConfigValues(conf, configMappings, expectedOutcomes);
}
- @Test
+ @TestTemplate
public void testInvalidCacheSplitFactorConfig() throws IOException {
float[] singleFactorConfigValues = { 0.2f, 0f, -0.2f, 1f };
float[] multiFactorConfigValues = { 0.4f, 0f, 1f, .05f };
@@ -602,11 +599,11 @@ private void checkConfigValues(Configuration conf, Map configMa
BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize,
constructedBlockSizes, writeThreads, writerQLen, null, 100, conf);
assertTrue(cache.waitForCacheInitialization(10000));
- assertTrue("Created BucketCache and expected it to succeed: " + expectSuccess[i]
- + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]);
+ assertTrue(expectSuccess[i], "Created BucketCache and expected it to succeed: "
+ + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i]);
} catch (IllegalArgumentException e) {
- assertFalse("Created BucketCache and expected it to succeed: " + expectSuccess[i]
- + ", but it actually was: " + !expectSuccess[i], expectSuccess[i]);
+ assertFalse(expectSuccess[i], "Created BucketCache and expected it to succeed: "
+ + expectSuccess[i] + ", but it actually was: " + !expectSuccess[i]);
}
}
}
@@ -618,7 +615,7 @@ private void validateGetPartitionSize(BucketCache bucketCache, float partitionFa
assertEquals(expectedOutput, bucketCache.getPartitionSize(partitionFactor));
}
- @Test
+ @TestTemplate
public void testOffsetProducesPositiveOutput() {
// This number is picked because it produces negative output if the values isn't ensured to be
// positive. See HBASE-18757 for more information.
@@ -629,7 +626,7 @@ public void testOffsetProducesPositiveOutput() {
assertEquals(testValue, bucketEntry.offset());
}
- @Test
+ @TestTemplate
public void testEvictionCount() throws InterruptedException {
int size = 100;
int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
@@ -680,7 +677,7 @@ public void testEvictionCount() throws InterruptedException {
assertEquals(1, cache.getStats().getEvictionCount());
}
- @Test
+ @TestTemplate
public void testCacheBlockNextBlockMetadataMissing() throws Exception {
int size = 100;
int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
@@ -738,7 +735,7 @@ public void testCacheBlockNextBlockMetadataMissing() throws Exception {
assertEquals(1, blockWithoutNextBlockMetadata.getBufferReadOnly().refCnt());
}
- @Test
+ @TestTemplate
public void testRAMCache() {
int size = 100;
int length = HConstants.HFILEBLOCK_HEADER_SIZE + size;
@@ -777,7 +774,7 @@ public void testRAMCache() {
assertEquals(1, ((HFileBlock) re2.getData()).getBufferReadOnly().refCnt());
}
- @Test
+ @TestTemplate
public void testFreeBlockWhenIOEngineWriteFailure() throws IOException {
// initialize an block.
int size = 100, offset = 20;
@@ -803,14 +800,14 @@ public void testFreeBlockWhenIOEngineWriteFailure() throws IOException {
BlockCacheKey key = new BlockCacheKey("dummy", 1L);
RAMQueueEntry re = new RAMQueueEntry(key, block, 1, true, false);
- Assert.assertEquals(0, allocator.getUsedSize());
+ assertEquals(0, allocator.getUsedSize());
try {
re.writeToCache(ioEngine, allocator, null, null,
ByteBuffer.allocate(HFileBlock.BLOCK_METADATA_SPACE));
- Assert.fail();
+ fail();
} catch (Exception e) {
}
- Assert.assertEquals(0, allocator.getUsedSize());
+ assertEquals(0, allocator.getUsedSize());
}
/**
@@ -818,7 +815,7 @@ public void testFreeBlockWhenIOEngineWriteFailure() throws IOException {
* could not be freed even if corresponding {@link HFileBlock} is evicted from
* {@link BucketCache}.
*/
- @Test
+ @TestTemplate
public void testFreeBucketEntryRestoredFromFile() throws Exception {
BucketCache bucketCache = null;
try {
@@ -868,7 +865,7 @@ public void testFreeBucketEntryRestoredFromFile() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testBlockAdditionWaitWhenCache() throws Exception {
BucketCache bucketCache = null;
try {
@@ -931,7 +928,7 @@ public void testBlockAdditionWaitWhenCache() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testOnConfigurationChange() throws Exception {
BucketCache bucketCache = null;
try {
@@ -974,7 +971,7 @@ public void testOnConfigurationChange() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testNotifyFileCachingCompletedSuccess() throws Exception {
BucketCache bucketCache = null;
try {
@@ -998,7 +995,7 @@ public void testNotifyFileCachingCompletedSuccess() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testNotifyFileCachingCompletedForEncodedDataSuccess() throws Exception {
BucketCache bucketCache = null;
try {
@@ -1022,7 +1019,7 @@ public void testNotifyFileCachingCompletedForEncodedDataSuccess() throws Excepti
}
}
- @Test
+ @TestTemplate
public void testNotifyFileCachingCompletedNotAllCached() throws Exception {
BucketCache bucketCache = null;
try {
@@ -1060,7 +1057,7 @@ private BucketCache testNotifyFileCachingCompletedForTenBlocks(Path filePath,
return bucketCache;
}
- @Test
+ @TestTemplate
public void testEvictOrphansOutOfGracePeriod() throws Exception {
BucketCache bucketCache = testEvictOrphans(0);
assertEquals(10, bucketCache.getBackingMap().size());
@@ -1068,7 +1065,7 @@ public void testEvictOrphansOutOfGracePeriod() throws Exception {
.filter(key -> key.getHfileName().equals("testEvictOrphans-orphan")).count());
}
- @Test
+ @TestTemplate
public void testEvictOrphansWithinGracePeriod() throws Exception {
BucketCache bucketCache = testEvictOrphans(60 * 60 * 1000L);
assertEquals(18, bucketCache.getBackingMap().size());
@@ -1119,7 +1116,7 @@ private BucketCache testEvictOrphans(long orphanEvictionGracePeriod) throws Exce
return bucketCache;
}
- @Test
+ @TestTemplate
public void testBlockPriority() throws Exception {
HFileBlockPair block = CacheTestUtils.generateHFileBlocks(BLOCK_SIZE, 1)[0];
cacheAndWaitUntilFlushedToBucket(cache, block.getBlockName(), block.getBlock(), true);
@@ -1128,7 +1125,7 @@ public void testBlockPriority() throws Exception {
assertEquals(cache.backingMap.get(block.getBlockName()).getPriority(), BlockPriority.MULTI);
}
- @Test
+ @TestTemplate
public void testIOTimePerHitReturnsZeroWhenNoHits()
throws NoSuchFieldException, IllegalAccessException {
CacheStats cacheStats = cache.getStats();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java
index b31c3706dd7e..a3c59e273054 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheEmptyPersistence.java
@@ -19,24 +19,23 @@
import static org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.FileOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Tests for HBASE-29857: BucketCache recovery should gracefully handle empty or truncated
@@ -46,13 +45,10 @@
* magic bytes without actual cache data. The fix adds null checks that throw IOException instead of
* allowing NullPointerException to propagate.
*/
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(RegionServerTests.TAG)
public class TestBucketCacheEmptyPersistence {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBucketCacheEmptyPersistence.class);
-
private static final long CAPACITY_SIZE = 32 * 1024 * 1024;
private static final int WRITE_THREADS = BucketCache.DEFAULT_WRITER_THREADS;
private static final int WRITER_QUEUE_LEN = BucketCache.DEFAULT_WRITER_QUEUE_ITEMS;
@@ -78,7 +74,7 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception {
try (FileOutputStream fos = new FileOutputStream(persistenceFile)) {
fos.write(ProtobufMagic.PB_MAGIC);
}
- assertTrue("Persistence file should exist", persistenceFile.exists());
+ assertTrue(persistenceFile.exists(), "Persistence file should exist");
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
@@ -88,12 +84,12 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception {
WRITE_THREADS, WRITER_QUEUE_LEN, persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, conf);
// Cache should initialize successfully (not hang or throw)
- assertTrue("Cache should initialize successfully after recovering from empty file",
- bucketCache.waitForCacheInitialization(10000));
+ assertTrue(bucketCache.waitForCacheInitialization(10000),
+ "Cache should initialize successfully after recovering from empty file");
// Verify the cache was reset (backing map should be empty since file had no valid data)
- assertEquals("Backing map should be empty after recovering from empty persistence file", 0,
- bucketCache.backingMap.size());
+ assertEquals(0, bucketCache.backingMap.size(),
+ "Backing map should be empty after recovering from empty persistence file");
// Verify the cache is usable - we can add and retrieve blocks
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 1);
@@ -104,7 +100,7 @@ public void testEmptyPersistenceFileNonChunkedFormat() throws Exception {
// Verify block can be retrieved
Cacheable retrieved = bucketCache.getBlock(blocks[0].getBlockName(), false, false, false);
- assertNotNull("Should be able to retrieve cached block", retrieved);
+ assertNotNull(retrieved, "Should be able to retrieve cached block");
bucketCache.shutdown();
testUtil.cleanupTestDir();
@@ -131,7 +127,7 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception {
try (FileOutputStream fos = new FileOutputStream(persistenceFile)) {
fos.write(BucketProtoUtils.PB_MAGIC_V2);
}
- assertTrue("Persistence file should exist", persistenceFile.exists());
+ assertTrue(persistenceFile.exists(), "Persistence file should exist");
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
@@ -141,12 +137,12 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception {
WRITE_THREADS, WRITER_QUEUE_LEN, persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, conf);
// Cache should initialize successfully (not hang or throw)
- assertTrue("Cache should initialize successfully after recovering from empty file",
- bucketCache.waitForCacheInitialization(10000));
+ assertTrue(bucketCache.waitForCacheInitialization(10000),
+ "Cache should initialize successfully after recovering from empty file");
// Verify the cache was reset (backing map should be empty since file had no valid data)
- assertEquals("Backing map should be empty after recovering from empty persistence file", 0,
- bucketCache.backingMap.size());
+ assertEquals(0, bucketCache.backingMap.size(),
+ "Backing map should be empty after recovering from empty persistence file");
// Verify the cache is usable - we can add and retrieve blocks
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 1);
@@ -157,7 +153,7 @@ public void testEmptyPersistenceFileChunkedFormat() throws Exception {
// Verify block can be retrieved
Cacheable retrieved = bucketCache.getBlock(blocks[0].getBlockName(), false, false, false);
- assertNotNull("Should be able to retrieve cached block", retrieved);
+ assertNotNull(retrieved, "Should be able to retrieve cached block");
bucketCache.shutdown();
testUtil.cleanupTestDir();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java
index 35a60ec93125..1f6235b8207c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCachePersister.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -29,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.fs.HFileSystem;
@@ -45,22 +44,15 @@
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ IOTests.class, MediumTests.class })
+@Tag(IOTests.TAG)
+@Tag(MediumTests.TAG)
public class TestBucketCachePersister {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBucketCachePersister.class);
-
- public TestName name = new TestName();
-
public int constructedBlockSize = 16 * 1024;
private static final Logger LOG = LoggerFactory.getLogger(TestBucketCachePersister.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java
index 4ee3f37819f7..08e7e8ea69e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCacheRefCnt.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -30,7 +30,6 @@
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
@@ -46,17 +45,13 @@
import org.apache.hadoop.hbase.nio.RefCnt;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestBucketCacheRefCnt {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBucketCacheRefCnt.class);
-
private static final String IO_ENGINE = "offheap";
private static final long CAPACITY_SIZE = 32 * 1024 * 1024;
private static final int BLOCK_SIZE = 1024;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
index facbe7c50d11..50763eeb0402 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
@@ -19,15 +19,14 @@
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
@@ -35,20 +34,16 @@
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.RAMQueueEntry;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestBucketWriterThread {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBucketWriterThread.class);
-
private BucketCache bc;
private BucketCache.WriterThread wt;
private BlockingQueue q;
@@ -75,7 +70,7 @@ protected void startWriterThreads() {
* Set up variables and get BucketCache and WriterThread into state where tests can manually
* control the running of WriterThread and BucketCache is empty.
*/
- @Before
+ @BeforeEach
public void setUp() throws Exception {
// Arbitrary capacity.
final int capacity = 16;
@@ -99,7 +94,7 @@ public void setUp() throws Exception {
assertTrue(q.isEmpty());
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
if (this.bc != null) this.bc.shutdown();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
index b42e7be804db..04a0ef65af39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
@@ -27,21 +28,16 @@
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Basic test for {@link ByteBufferIOEngine}
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestByteBufferIOEngine {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestByteBufferIOEngine.class);
-
/**
* Override the {@link BucketEntry} so that we can set an arbitrary offset.
*/
@@ -114,10 +110,10 @@ public void testByteBufferIOEngine() throws Exception {
BucketEntry be = createBucketEntry(offset, blockSize);
ioEngine.read(be);
ByteBuff dst = getByteBuff(be);
- Assert.assertEquals(src.remaining(), blockSize);
- Assert.assertEquals(dst.remaining(), blockSize);
- Assert.assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst,
- dst.position(), dst.remaining()));
+ assertEquals(src.remaining(), blockSize);
+ assertEquals(dst.remaining(), blockSize);
+ assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, dst.position(),
+ dst.remaining()));
}
assert testOffsetAtStartNum == 0;
assert testOffsetAtEndNum == 0;
@@ -189,10 +185,10 @@ public void testByteBufferIOEngineWithMBB() throws Exception {
BucketEntry be = createBucketEntry(offset, blockSize);
ioEngine.read(be);
ByteBuff dst = getByteBuff(be);
- Assert.assertEquals(src.remaining(), blockSize);
- Assert.assertEquals(dst.remaining(), blockSize);
- Assert.assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst,
- dst.position(), dst.remaining()));
+ assertEquals(src.remaining(), blockSize);
+ assertEquals(dst.remaining(), blockSize);
+ assertEquals(0, ByteBuff.compareTo(src, src.position(), src.remaining(), dst, dst.position(),
+ dst.remaining()));
}
assert testOffsetAtStartNum == 0;
assert testOffsetAtEndNum == 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java
index 8cde8c6347d3..dbd7ab047cf2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestExclusiveMemoryMmapEngine.java
@@ -19,28 +19,23 @@
import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.createBucketEntry;
import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.getByteBuff;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.File;
import java.io.IOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Basic test for {@link ExclusiveMemoryMmapIOEngine}
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestExclusiveMemoryMmapEngine {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestExclusiveMemoryMmapEngine.class);
-
@Test
public void testFileMmapEngine() throws IOException {
int size = 2 * 1024 * 1024; // 2 MB
@@ -63,10 +58,9 @@ public void testFileMmapEngine() throws IOException {
fileMmapEngine.read(be);
ByteBuff dst = getByteBuff(be);
- Assert.assertEquals(src.remaining(), len);
- Assert.assertEquals(dst.remaining(), len);
- Assert.assertEquals(0,
- ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining()));
+ assertEquals(src.remaining(), len);
+ assertEquals(dst.remaining(), len);
+ assertEquals(0, ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining()));
}
} finally {
File file = new File(filePath);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index af70c61c12c4..f19d13d8490e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -19,11 +19,11 @@
import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.createBucketEntry;
import static org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.getByteBuff;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.File;
import java.io.IOException;
@@ -31,18 +31,15 @@
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.RefCnt;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -50,13 +47,10 @@
/**
* Basic test for {@link FileIOEngine}
*/
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestFileIOEngine {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestFileIOEngine.class);
-
private static final long TOTAL_CAPACITY = 6 * 1024 * 1024; // 6 MB
private static final String[] FILE_PATHS =
{ "testFileIOEngine1", "testFileIOEngine2", "testFileIOEngine3" };
@@ -81,12 +75,12 @@ public class TestFileIOEngine {
boundaryStopPositions.add(SIZE_PER_FILE * FILE_PATHS.length - 1);
}
- @Before
+ @BeforeEach
public void setUp() throws IOException {
fileIOEngine = new FileIOEngine(TOTAL_CAPACITY, false, FILE_PATHS);
}
- @After
+ @AfterEach
public void cleanUp() {
fileIOEngine.shutdown();
for (String filePath : FILE_PATHS) {
@@ -177,10 +171,9 @@ public void testClosedChannelException() throws IOException {
fileIOEngine.read(be);
ByteBuff dst = getByteBuff(be);
- Assert.assertEquals(src.remaining(), len);
- Assert.assertEquals(dst.remaining(), len);
- Assert.assertEquals(0,
- ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining()));
+ assertEquals(src.remaining(), len);
+ assertEquals(dst.remaining(), len);
+ assertEquals(0, ByteBuff.compareTo(src, pos, len, dst, dst.position(), dst.remaining()));
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java
index a2909c005fd4..7d981d793553 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestPrefetchPersistence.java
@@ -17,19 +17,19 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.fs.HFileSystem;
@@ -41,39 +41,33 @@
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@RunWith(Parameterized.class)
-@Category({ IOTests.class, LargeTests.class })
+@Tag(IOTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}")
public class TestPrefetchPersistence {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestPrefetchPersistence.class);
- public TestName name = new TestName();
-
- @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}")
@SuppressWarnings("checkstyle:Indentation")
- public static Iterable data() {
- return Arrays.asList(new Object[][] { { 16 * 1024,
+ public static Stream parameters() {
+ return Stream.of(Arguments.of(16 * 1024,
new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024,
28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024,
- 128 * 1024 + 1024 } } });
+ 128 * 1024 + 1024 }));
}
- @Parameterized.Parameter(0)
- public int constructedBlockSize;
+ final int constructedBlockSize;
+ final int[] constructedBlockSizes;
- @Parameterized.Parameter(1)
- public int[] constructedBlockSizes;
+ public TestPrefetchPersistence(int constructedBlockSize, int[] constructedBlockSizes) {
+ this.constructedBlockSize = constructedBlockSize;
+ this.constructedBlockSizes = constructedBlockSizes;
+ }
private static final Logger LOG = LoggerFactory.getLogger(TestPrefetchPersistence.class);
@@ -95,7 +89,7 @@ public static Iterable data() {
final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS;
final int writerQLen = BucketCache.DEFAULT_WRITER_QUEUE_ITEMS;
- @Before
+ @BeforeEach
public void setup() throws IOException {
conf = TEST_UTIL.getConfiguration();
conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
@@ -104,7 +98,7 @@ public void setup() throws IOException {
fs = HFileSystem.get(conf);
}
- @Test
+ @TestTemplate
public void testPrefetchPersistence() throws Exception {
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java
index 58d9385f57e9..ac9f52b84a9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRAMCache.java
@@ -17,10 +17,14 @@
*/
package org.apache.hadoop.hbase.io.hfile.bucket;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
@@ -33,21 +37,16 @@
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ IOTests.class, SmallTests.class })
+@Tag(IOTests.TAG)
+@Tag(SmallTests.TAG)
public class TestRAMCache {
private static final Logger LOG = LoggerFactory.getLogger(TestRAMCache.class);
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRAMCache.class);
-
// Define a mock HFileBlock.
private static class MockHFileBlock extends HFileBlock {
@@ -92,8 +91,8 @@ public void testAtomicRAMCache() throws Exception {
new HFileContextBuilder().build(), ByteBuffAllocator.HEAP);
RAMQueueEntry re = new RAMQueueEntry(key, blk, 1, false, false);
- Assert.assertNull(cache.putIfAbsent(key, re));
- Assert.assertEquals(cache.putIfAbsent(key, re), re);
+ assertNull(cache.putIfAbsent(key, re));
+ assertEquals(re, cache.putIfAbsent(key, re));
CountDownLatch latch = new CountDownLatch(1);
blk.setLatch(latch);
@@ -116,11 +115,11 @@ public void testAtomicRAMCache() throws Exception {
});
t2.start();
Thread.sleep(200);
- Assert.assertFalse(removed.get());
+ assertFalse(removed.get());
latch.countDown();
Thread.sleep(200);
- Assert.assertTrue(removed.get());
- Assert.assertFalse(error.get());
+ assertTrue(removed.get());
+ assertFalse(error.get());
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
index 5ae3343d21e6..afc2437c327e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
@@ -22,32 +22,29 @@
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.MIN_FACTOR_CONFIG_NAME;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Basic test for check file's integrity before start BucketCache in fileIOEngine
*/
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(RegionServerTests.TAG)
public class TestRecoveryPersistentBucketCache {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRecoveryPersistentBucketCache.class);
final long capacitySize = 32 * 1024 * 1024;
final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
index 9ba9eba5c61b..fde6d46f34ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestVerifyBucketCacheFile.java
@@ -20,10 +20,10 @@
import static org.apache.hadoop.hbase.io.hfile.CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BACKING_MAP_PERSISTENCE_CHUNK_SIZE;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedWriter;
import java.io.File;
@@ -33,54 +33,47 @@
import java.nio.file.Files;
import java.nio.file.attribute.FileTime;
import java.time.Instant;
-import java.util.Arrays;
+import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
/**
* Basic test for check file's integrity before start BucketCache in fileIOEngine
*/
-@RunWith(Parameterized.class)
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
+@Tag(RegionServerTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: blockSize={0}, bucketSizes={1}")
public class TestVerifyBucketCacheFile {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestVerifyBucketCacheFile.class);
-
- @Parameterized.Parameters(name = "{index}: blockSize={0}, bucketSizes={1}")
- public static Iterable data() {
- return Arrays.asList(new Object[][] { { 8192, null },
- { 16 * 1024,
+
+ public static Stream<Arguments> parameters() {
+ return Stream.of(Arguments.of(8192, null),
+ Arguments.of(16 * 1024,
new int[] { 2 * 1024 + 1024, 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024,
28 * 1024 + 1024, 32 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024,
- 128 * 1024 + 1024 } } });
+ 128 * 1024 + 1024 }));
}
- @Rule
- public TestName name = new TestName();
-
- @Parameterized.Parameter(0)
- public int constructedBlockSize;
+ private final int constructedBlockSize;
+ private final int[] constructedBlockSizes;
- @Parameterized.Parameter(1)
- public int[] constructedBlockSizes;
+ public TestVerifyBucketCacheFile(int constructedBlockSize, int[] constructedBlockSizes) {
+ this.constructedBlockSize = constructedBlockSize;
+ this.constructedBlockSizes = constructedBlockSizes;
+ }
final long capacitySize = 32 * 1024 * 1024;
final int writeThreads = BucketCache.DEFAULT_WRITER_THREADS;
@@ -96,7 +89,7 @@ public static Iterable data() {
* cache file and persistence file would be deleted before BucketCache start normally.
* @throws Exception the exception
*/
- @Test
+ @TestTemplate
public void testRetrieveFromFile() throws Exception {
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Path testDir = TEST_UTIL.getDataTestDir();
@@ -108,9 +101,9 @@ public void testRetrieveFromFile() throws Exception {
BucketCache bucketCache = null;
BucketCache recoveredBucketCache = null;
try {
- bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
- constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName());
+ bucketCache =
+ new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize,
+ constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence");
assertTrue(bucketCache.waitForCacheInitialization(10000));
long usedSize = bucketCache.getAllocator().getUsedSize();
assertEquals(0, usedSize);
@@ -126,9 +119,9 @@ public void testRetrieveFromFile() throws Exception {
// 1.persist cache to file
bucketCache.shutdown();
// restore cache from file
- bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
- constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName());
+ bucketCache =
+ new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize,
+ constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence");
assertTrue(bucketCache.waitForCacheInitialization(10000));
assertEquals(usedSize, bucketCache.getAllocator().getUsedSize());
// persist cache to file
@@ -139,9 +132,9 @@ public void testRetrieveFromFile() throws Exception {
FileSystems.getDefault().getPath(testDir.toString(), "bucket.cache");
assertTrue(Files.deleteIfExists(cacheFile));
// can't restore cache from file
- recoveredBucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
- constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName());
+ recoveredBucketCache =
+ new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, constructedBlockSize,
+ constructedBlockSizes, writeThreads, writerQLen, testDir + "/bucket.persistence");
assertTrue(recoveredBucketCache.waitForCacheInitialization(10000));
waitPersistentCacheValidation(conf, recoveredBucketCache);
assertEquals(0, recoveredBucketCache.getAllocator().getUsedSize());
@@ -157,14 +150,13 @@ public void testRetrieveFromFile() throws Exception {
recoveredBucketCache.shutdown();
// 3.delete backingMap persistence file
- final java.nio.file.Path mapFile = FileSystems.getDefault().getPath(testDir.toString(),
- "bucket.persistence" + name.getMethodName());
+ final java.nio.file.Path mapFile =
+ FileSystems.getDefault().getPath(testDir.toString(), "bucket.persistence");
assertTrue(Files.deleteIfExists(mapFile));
// can't restore cache from file
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION,
- conf);
+ testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf);
assertTrue(bucketCache.waitForCacheInitialization(10000));
waitPersistentCacheValidation(conf, bucketCache);
assertEquals(0, bucketCache.getAllocator().getUsedSize());
@@ -180,15 +172,14 @@ public void testRetrieveFromFile() throws Exception {
TEST_UTIL.cleanupTestDir();
}
- @Test
+ @TestTemplate
public void testRetrieveFromFileAfterDelete() throws Exception {
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Path testDir = TEST_UTIL.getDataTestDir();
TEST_UTIL.getTestFileSystem().mkdirs(testDir);
Configuration conf = TEST_UTIL.getConfiguration();
conf.setLong(CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY, 300);
- String mapFileName =
- testDir + "/bucket.persistence" + name.getMethodName() + EnvironmentEdgeManager.currentTime();
+ String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime();
BucketCache bucketCache = null;
try {
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
@@ -233,7 +224,7 @@ public void testRetrieveFromFileAfterDelete() throws Exception {
* persistence file would be deleted before BucketCache start normally.
* @throws Exception the exception
*/
- @Test
+ @TestTemplate
public void testModifiedBucketCacheFileData() throws Exception {
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Path testDir = TEST_UTIL.getDataTestDir();
@@ -246,8 +237,7 @@ public void testModifiedBucketCacheFileData() throws Exception {
try {
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION,
- conf);
+ testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf);
assertTrue(bucketCache.waitForCacheInitialization(10000));
long usedSize = bucketCache.getAllocator().getUsedSize();
assertEquals(0, usedSize);
@@ -272,8 +262,7 @@ public void testModifiedBucketCacheFileData() throws Exception {
// can't restore cache from file
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION,
- conf);
+ testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf);
assertTrue(bucketCache.waitForCacheInitialization(10000));
waitPersistentCacheValidation(conf, bucketCache);
assertEquals(0, bucketCache.getAllocator().getUsedSize());
@@ -300,7 +289,7 @@ public void testModifiedBucketCacheFileData() throws Exception {
* recoverable from the cache.
* @throws Exception the exception
*/
- @Test
+ @TestTemplate
public void testModifiedBucketCacheFileTime() throws Exception {
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Path testDir = TEST_UTIL.getDataTestDir();
@@ -312,8 +301,7 @@ public void testModifiedBucketCacheFileTime() throws Exception {
try {
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION,
- conf);
+ testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf);
assertTrue(bucketCache.waitForCacheInitialization(10000));
long usedSize = bucketCache.getAllocator().getUsedSize();
assertEquals(0, usedSize);
@@ -338,8 +326,7 @@ public void testModifiedBucketCacheFileTime() throws Exception {
// can't restore cache from file
bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
constructedBlockSize, constructedBlockSizes, writeThreads, writerQLen,
- testDir + "/bucket.persistence" + name.getMethodName(), DEFAULT_ERROR_TOLERATION_DURATION,
- conf);
+ testDir + "/bucket.persistence", DEFAULT_ERROR_TOLERATION_DURATION, conf);
assertTrue(bucketCache.waitForCacheInitialization(10000));
waitPersistentCacheValidation(conf, bucketCache);
assertEquals(usedSize, bucketCache.getAllocator().getUsedSize());
@@ -360,7 +347,7 @@ public void testModifiedBucketCacheFileTime() throws Exception {
* corruption.
* @throws Exception the exception
*/
- @Test
+ @TestTemplate
public void testBucketCacheRecovery() throws Exception {
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Path testDir = TEST_UTIL.getDataTestDir();
@@ -368,8 +355,7 @@ public void testBucketCacheRecovery() throws Exception {
Configuration conf = HBaseConfiguration.create();
// Disables the persister thread by setting its interval to MAX_VALUE
conf.setLong(BUCKETCACHE_PERSIST_INTERVAL_KEY, Long.MAX_VALUE);
- String mapFileName =
- testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime() + name.getMethodName();
+ String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime();
BucketCache bucketCache = null;
BucketCache newBucketCache = null;
try {
@@ -420,18 +406,18 @@ public void testBucketCacheRecovery() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testSingleChunk() throws Exception {
testChunkedBackingMapRecovery(5, 5);
}
- @Test
+ @TestTemplate
public void testCompletelyFilledChunks() throws Exception {
// Test where the all the chunks are complete with chunkSize entries
testChunkedBackingMapRecovery(5, 10);
}
- @Test
+ @TestTemplate
public void testPartiallyFilledChunks() throws Exception {
// Test where the last chunk is not completely filled.
testChunkedBackingMapRecovery(5, 13);
@@ -444,8 +430,7 @@ private void testChunkedBackingMapRecovery(int chunkSize, int numBlocks) throws
Configuration conf = HBaseConfiguration.create();
conf.setLong(BACKING_MAP_PERSISTENCE_CHUNK_SIZE, chunkSize);
- String mapFileName =
- testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime() + name.getMethodName();
+ String mapFileName = testDir + "/bucket.persistence" + EnvironmentEdgeManager.currentTime();
BucketCache bucketCache = null;
BucketCache newBucketCache = null;
try {
| | |