From 1a1860220b2670cedae59f4e0f906a7ebcf7d5ee Mon Sep 17 00:00:00 2001 From: liuxiaocs7 Date: Sun, 3 May 2026 22:14:25 +0800 Subject: [PATCH 1/2] HBASE-30138 Upgrade hbase-server to use junit5 Part16 --- .../hbase/regionserver/TestHMobStore.java | 127 ++++--- .../hbase/regionserver/TestHRegion.java | 323 +++++++++--------- .../regionserver/TestHRegionFileSystem.java | 56 ++- .../regionserver/TestHRegionOnCluster.java | 32 +- .../regionserver/TestHRegionReplayEvents.java | 61 ++-- .../TestHRegionServerBulkLoad.java | 37 +- .../regionserver/TestHRegionTracing.java | 43 ++- .../TestHRegionWithInMemoryFlush.java | 15 +- .../hadoop/hbase/regionserver/TestHStore.java | 179 +++++----- .../hbase/regionserver/TestHStoreFile.java | 72 ++-- .../regionserver/TestHdfsSnapshotHRegion.java | 28 +- .../regionserver/TestHeapMemoryManager.java | 21 +- .../TestInputStreamBlockDistribution.java | 29 +- .../regionserver/TestIsDeleteFailure.java | 38 +-- .../regionserver/TestJoinedScanners.java | 44 +-- .../hbase/regionserver/TestKeepDeletes.java | 74 ++-- .../hbase/regionserver/TestKeyValueHeap.java | 39 +-- .../regionserver/TestKeyValueScanFixture.java | 17 +- .../hbase/regionserver/TestLogRoller.java | 29 +- .../regionserver/TestMajorCompaction.java | 104 +++--- .../TestMaxResultsPerColumnFamily.java | 38 +-- .../regionserver/TestMemStoreChunkPool.java | 37 +- .../regionserver/TestMemStoreFlusher.java | 42 +-- .../hbase/regionserver/TestMemStoreLAB.java | 52 ++- .../TestMemStoreSegmentsIterator.java | 25 +- .../TestMemstoreLABWithoutPool.java | 35 +- .../TestMergesSplitsAddToTracker.java | 35 +- .../TestMetricsHeapMemoryManager.java | 21 +- .../hbase/regionserver/TestMetricsJvm.java | 32 +- .../hbase/regionserver/TestMetricsRegion.java | 13 +- .../regionserver/TestMetricsRegionServer.java | 37 +- .../TestMetricsRegionServerAggregate.java | 15 +- ...tricsRegionWrapperTableDescriptorHash.java | 27 +- .../TestMetricsTableAggregate.java | 23 +- .../TestMetricsTableMetricsMap.java | 25 +- 
.../TestMetricsTableRequests.java | 35 +- .../TestMetricsUserAggregate.java | 25 +- .../hbase/regionserver/TestMinVersions.java | 71 ++-- .../TestMiniBatchOperationInProgress.java | 19 +- .../regionserver/TestMinorCompaction.java | 46 ++- .../regionserver/TestMultiColumnScanner.java | 56 +-- ...umnScannerWithAlgoGZAndNoDataEncoding.java | 27 +- ...mnScannerWithAlgoGZAndUseDataEncoding.java | 27 +- ...olumnScannerWithNoneAndNoDataEncoding.java | 27 +- ...lumnScannerWithNoneAndUseDataEncoding.java | 27 +- .../regionserver/TestMultiLogThreshold.java | 49 ++- .../TestMultiVersionConcurrencyControl.java | 21 +- ...stMultiVersionConcurrencyControlBasic.java | 15 +- .../regionserver/TestMutateRowsRecovery.java | 35 +- .../TestNewVersionBehaviorFromClientSide.java | 49 ++- ...tCleanupCompactedFileWhenRegionWarmup.java | 33 +- 51 files changed, 1077 insertions(+), 1310 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java index db8636199574..6f82d63cd287 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java @@ -18,6 +18,9 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.regionserver.Store.PRIORITY_USER; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.security.Key; @@ -42,13 +45,11 @@ import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import 
org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -75,27 +76,19 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.wal.WALFactory; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestHMobStore { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHMobStore.class); - public static final Logger LOG = LoggerFactory.getLogger(TestHMobStore.class); - @Rule - public TestName name = new TestName(); + private String name; private HMobStore store; private HRegion region; @@ -127,8 +120,9 @@ public class TestHMobStore { /** * Setup */ - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); qualifiers.add(qf1); qualifiers.add(qf3); qualifiers.add(qf5); @@ -198,7 +192,7 @@ private void init(Configuration conf, ColumnFamilyDescriptor cfd) throws IOExcep String targetPathName = MobUtils.formatDate(currentDate); byte[] referenceValue = Bytes.toBytes(targetPathName + Path.SEPARATOR + mobFilePath.getName()); - Tag tableNameTag = + org.apache.hadoop.hbase.Tag tableNameTag = new 
ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName()); KeyValue kv1 = new KeyValue(row, family, qf1, Long.MAX_VALUE, referenceValue); KeyValue kv2 = new KeyValue(row, family, qf2, Long.MAX_VALUE, referenceValue); @@ -211,10 +205,11 @@ private void init(Configuration conf, ColumnFamilyDescriptor cfd) throws IOExcep /** * Getting data from memstore */ + @Test public void testGetFromMemStore() throws IOException { final Configuration conf = HBaseConfiguration.create(); - init(name.getMethodName(), conf, false); + init(name, conf, false); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, value), null); @@ -234,10 +229,10 @@ public void testGetFromMemStore() throws IOException { scanner.close(); // Compare - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { // Verify the values - Assert.assertEquals(expected.get(i), results.get(i)); + assertEquals(expected.get(i), results.get(i)); } } @@ -247,7 +242,7 @@ public void testGetFromMemStore() throws IOException { @Test public void testGetFromFiles() throws IOException { final Configuration conf = TEST_UTIL.getConfiguration(); - init(name.getMethodName(), conf, false); + init(name, conf, false); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, value), null); @@ -277,9 +272,9 @@ public void testGetFromFiles() throws IOException { scanner.close(); // Compare - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { - Assert.assertEquals(expected.get(i), results.get(i)); + assertEquals(expected.get(i), results.get(i)); } } @@ -289,7 +284,7 @@ public void testGetFromFiles() throws IOException { @Test public void testGetReferencesFromFiles() throws IOException { final Configuration conf = HBaseConfiguration.create(); - init(name.getMethodName(), conf, false); + init(name, 
conf, false); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, value), null); @@ -320,10 +315,10 @@ public void testGetReferencesFromFiles() throws IOException { scanner.close(); // Compare - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { ExtendedCell cell = results.get(i); - Assert.assertTrue(MobUtils.isMobReferenceCell(cell)); + assertTrue(MobUtils.isMobReferenceCell(cell)); } } @@ -335,7 +330,7 @@ public void testGetFromMemStoreAndFiles() throws IOException { final Configuration conf = HBaseConfiguration.create(); - init(name.getMethodName(), conf, false); + init(name, conf, false); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, value), null); @@ -363,9 +358,9 @@ public void testGetFromMemStoreAndFiles() throws IOException { scanner.close(); // Compare - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { - Assert.assertEquals(expected.get(i), results.get(i)); + assertEquals(expected.get(i), results.get(i)); } } @@ -377,7 +372,7 @@ public void testMobCellSizeThreshold() throws IOException { final Configuration conf = HBaseConfiguration.create(); ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(family) .setMobEnabled(true).setMobThreshold(100).setMaxVersions(4).build(); - init(name.getMethodName(), conf, cfd, false); + init(name, conf, cfd, false); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, value), null); @@ -408,34 +403,34 @@ public void testMobCellSizeThreshold() throws IOException { scanner.close(); // Compare - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { ExtendedCell cell = results.get(i); // this is not mob reference cell. 
- Assert.assertFalse(MobUtils.isMobReferenceCell(cell)); - Assert.assertEquals(expected.get(i), results.get(i)); - Assert.assertEquals(100, store.getColumnFamilyDescriptor().getMobThreshold()); + assertFalse(MobUtils.isMobReferenceCell(cell)); + assertEquals(expected.get(i), results.get(i)); + assertEquals(100, store.getColumnFamilyDescriptor().getMobThreshold()); } } @Test public void testCommitFile() throws Exception { final Configuration conf = HBaseConfiguration.create(); - init(name.getMethodName(), conf, true); + init(name, conf, true); String targetPathName = MobUtils.formatDate(new Date()); Path targetPath = new Path(store.getPath(), (targetPathName + Path.SEPARATOR + mobFilePath.getName())); fs.delete(targetPath, true); - Assert.assertFalse(fs.exists(targetPath)); + assertFalse(fs.exists(targetPath)); // commit file store.commitFile(mobFilePath, targetPath); - Assert.assertTrue(fs.exists(targetPath)); + assertTrue(fs.exists(targetPath)); } @Test public void testResolve() throws Exception { final Configuration conf = HBaseConfiguration.create(); - init(name.getMethodName(), conf, true); + init(name, conf, true); String targetPathName = MobUtils.formatDate(currentDate); Path targetPath = new Path(store.getPath(), targetPathName); store.commitFile(mobFilePath, targetPath); @@ -444,9 +439,9 @@ public void testResolve() throws Exception { Cell resultCell2 = store.resolve(seekKey2, false).getCell(); Cell resultCell3 = store.resolve(seekKey3, false).getCell(); // compare - Assert.assertEquals(Bytes.toString(value), Bytes.toString(CellUtil.cloneValue(resultCell1))); - Assert.assertEquals(Bytes.toString(value), Bytes.toString(CellUtil.cloneValue(resultCell2))); - Assert.assertEquals(Bytes.toString(value2), Bytes.toString(CellUtil.cloneValue(resultCell3))); + assertEquals(Bytes.toString(value), Bytes.toString(CellUtil.cloneValue(resultCell1))); + assertEquals(Bytes.toString(value), Bytes.toString(CellUtil.cloneValue(resultCell2))); + 
assertEquals(Bytes.toString(value2), Bytes.toString(CellUtil.cloneValue(resultCell3))); } @Test @@ -466,9 +461,9 @@ public void testReversedMobStoreScannerGetFilesRead() throws IOException { private void doTestMobStoreScannerGetFilesRead(boolean reversed) throws IOException { // Setup: conf, root dir, and MOB store init (mob threshold causes large values to go to MOB). final Configuration conf = HBaseConfiguration.create(); - Path basedir = new Path(DIR + name.getMethodName()); + Path basedir = new Path(DIR + name); CommonFSUtils.setRootDir(conf, basedir); - init(name.getMethodName(), conf, false); + init(name, conf, false); // Add values above MOB threshold and flush so DefaultMobStoreFlusher creates mob file and refs. byte[] valueAboveThreshold = Bytes.toBytes("value"); // threshold in setup is 3 bytes @@ -493,8 +488,8 @@ private void doTestMobStoreScannerGetFilesRead(boolean reversed) throws IOExcept } } } - Assert.assertTrue("Should have at least one store file and one mob file", - expectedFilePaths.size() >= 2); + assertTrue(expectedFilePaths.size() >= 2, + "Should have at least one store file and one mob file"); // Build scan (optionally reversed) and target columns; get store scanner and verify type. Scan scan = new Scan(); @@ -511,18 +506,17 @@ private void doTestMobStoreScannerGetFilesRead(boolean reversed) throws IOExcept KeyValueScanner kvScanner = store.getScanner(scan, targetCols, 0); if (reversed) { - Assert.assertTrue("Store scanner should be ReversedMobStoreScanner", - kvScanner instanceof ReversedMobStoreScanner); + assertTrue(kvScanner instanceof ReversedMobStoreScanner, + "Store scanner should be ReversedMobStoreScanner"); } else { - Assert.assertTrue("Store scanner should be MobStoreScanner", - kvScanner instanceof MobStoreScanner); + assertTrue(kvScanner instanceof MobStoreScanner, "Store scanner should be MobStoreScanner"); } // Before close: getFilesRead must be empty; then drain scanner to resolve MOB refs. 
try { Set filesReadBeforeClose = kvScanner.getFilesRead(); - Assert.assertTrue("Should return empty set before closing", filesReadBeforeClose.isEmpty()); - Assert.assertEquals("Should have 0 files before closing", 0, filesReadBeforeClose.size()); + assertTrue(filesReadBeforeClose.isEmpty(), "Should return empty set before closing"); + assertEquals(0, filesReadBeforeClose.size(), "Should have 0 files before closing"); List results = new ArrayList<>(); InternalScanner storeScanner = (InternalScanner) kvScanner; @@ -532,18 +526,17 @@ private void doTestMobStoreScannerGetFilesRead(boolean reversed) throws IOExcept // Still before close: set must remain empty until scanner is closed. filesReadBeforeClose = kvScanner.getFilesRead(); - Assert.assertTrue("Should return empty set before closing even after reading", - filesReadBeforeClose.isEmpty()); + assertTrue(filesReadBeforeClose.isEmpty(), + "Should return empty set before closing even after reading"); } finally { kvScanner.close(); } // After close: set must contain exactly the expected store + MOB file paths. 
Set filesReadAfterClose = kvScanner.getFilesRead(); - Assert.assertEquals("Should have exact file count after closing", expectedFilePaths.size(), - filesReadAfterClose.size()); - Assert.assertEquals("Should contain all expected file paths", expectedFilePaths, - filesReadAfterClose); + assertEquals(expectedFilePaths.size(), filesReadAfterClose.size(), + "Should have exact file count after closing"); + assertEquals(expectedFilePaths, filesReadAfterClose, "Should contain all expected file paths"); } /** @@ -551,8 +544,8 @@ private void doTestMobStoreScannerGetFilesRead(boolean reversed) throws IOExcept */ private void flush(int storeFilesSize) throws IOException { flushStore(store, id++); - Assert.assertEquals(storeFilesSize, this.store.getStorefiles().size()); - Assert.assertEquals(0, ((AbstractMemStore) this.store.memstore).getActive().getCellsCount()); + assertEquals(storeFilesSize, this.store.getStorefiles().size()); + assertEquals(0, ((AbstractMemStore) this.store.memstore).getActive().getCellsCount()); } /** @@ -582,7 +575,7 @@ public void testMOBStoreEncryption() throws Exception { conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()), cfKey)) .build(); - init(name.getMethodName(), conf, cfd, false); + init(name, conf, cfd, false); this.store.add(new KeyValue(row, family, qf1, 1, value), null); this.store.add(new KeyValue(row, family, qf2, 1, value), null); @@ -606,9 +599,9 @@ public void testMOBStoreEncryption() throws Exception { scanner.next(results); Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); - Assert.assertEquals(expected.size(), results.size()); + assertEquals(expected.size(), results.size()); for (int i = 0; i < results.size(); i++) { - Assert.assertEquals(expected.get(i), results.get(i)); + assertEquals(expected.get(i), results.get(i)); } // Trigger major compaction @@ -616,7 +609,7 @@ public void testMOBStoreEncryption() throws Exception { Optional requestCompaction = 
this.store.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null); this.store.compact(requestCompaction.get(), NoLimitThroughputController.INSTANCE, null); - Assert.assertEquals(1, this.store.getStorefiles().size()); + assertEquals(1, this.store.getStorefiles().size()); // Check encryption after compaction checkMobHFileEncrytption(this.store.getStorefiles()); @@ -626,8 +619,8 @@ private void checkMobHFileEncrytption(Collection storefiles) { HStoreFile storeFile = storefiles.iterator().next(); HFile.Reader reader = storeFile.getReader().getHFileReader(); byte[] encryptionKey = reader.getTrailer().getEncryptionKey(); - Assert.assertTrue(null != encryptionKey); - Assert.assertTrue(reader.getFileContext().getEncryptionContext().getCipher().getName() + assertTrue(null != encryptionKey); + assertTrue(reader.getFileContext().getEncryptionContext().getCipher().getName() .equals(HConstants.CIPHER_AES)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index d5de6418a8bc..c138b0a448a7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -21,14 +21,14 @@ import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam2; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam3; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyString; @@ -86,7 +86,6 @@ import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -183,16 +182,12 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer; import org.apache.hadoop.hbase.wal.WALSplitUtil; import org.apache.hadoop.hbase.wal.WALStreamReader; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatcher; import org.mockito.invocation.InvocationOnMock; @@ -218,22 +213,15 @@ * Basic stand-alone testing of HRegion. No clusters! A lot of the meta information for an HRegion * now lives inside other HRegions or in the HBaseMaster, so only basic testing is possible. 
*/ -@Category({ VerySlowRegionServerTests.class, LargeTests.class }) +@Tag(VerySlowRegionServerTests.TAG) +@Tag(LargeTests.TAG) @SuppressWarnings("deprecation") public class TestHRegion { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegion.class); - // Do not spin up clusters in here. If you need to spin up a cluster, do it // over in TestHRegionOnCluster. private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class); - @Rule - public TestName name = new TestName(); - @Rule - public final ExpectedException thrown = ExpectedException.none(); - + private String name; private static final String COLUMN_FAMILY = "MyCF"; private static final byte[] COLUMN_FAMILY_BYTES = Bytes.toBytes(COLUMN_FAMILY); private static final EventLoopGroup GROUP = new NioEventLoopGroup(); @@ -261,19 +249,20 @@ public class TestHRegion { protected final MetricsAssertHelper metricsAssertHelper = CompatibilitySingletonFactory.getInstance(MetricsAssertHelper.class); - @Before - public void setup() throws IOException { + @BeforeEach + public void setup(TestInfo testInfo) throws IOException { + this.name = testInfo.getTestMethod().get().getName(); TEST_UTIL = new HBaseTestingUtil(); CONF = TEST_UTIL.getConfiguration(); NettyAsyncFSWALConfigHelper.setEventLoopConfig(CONF, GROUP, NioSocketChannel.class); dir = TEST_UTIL.getDataTestDir("TestHRegion").toString(); - method = name.getMethodName(); + method = name; tableName = TableName.valueOf(method); CONF.set(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, String.valueOf(0.09)); CONF.setLong(AbstractFSWAL.WAL_SYNC_TIMEOUT_MS, 10000); } - @After + @AfterEach public void tearDown() throws IOException { // Region may have been closed, but it is still no harm if we close it again here using HTU. HBaseTestingUtil.closeRegionAndWAL(region); @@ -285,6 +274,7 @@ public void tearDown() throws IOException { /** * Test that I can use the max flushed sequence id after the close. 
*/ + @Test public void testSequenceId() throws IOException { region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES); @@ -393,10 +383,10 @@ protected void doSync(long txid, boolean forceSync) throws IOException { } catch (IOException ioe) { threwIOE = true; } finally { - assertTrue("The regionserver should have thrown an exception", threwIOE); + assertTrue(threwIOE, "The regionserver should have thrown an exception"); } MemStoreSize mss = store.getFlushableSize(); - assertTrue("flushable size should be zero, but it is " + mss, mss.getDataSize() == 0); + assertTrue(mss.getDataSize() == 0, "flushable size should be zero, but it is " + mss); } /** @@ -446,9 +436,9 @@ public void testMemstoreSizeAccountingWithFailedPostBatchMutate() throws IOExcep } catch (IOException expected) { } long expectedSize = onePutSize * 2; - assertEquals("memstoreSize should be incremented", expectedSize, region.getMemStoreDataSize()); - assertEquals("flushable size should be incremented", expectedSize, - store.getFlushableSize().getDataSize()); + assertEquals(expectedSize, region.getMemStoreDataSize(), "memstoreSize should be incremented"); + assertEquals(expectedSize, store.getFlushableSize().getDataSize(), + "flushable size should be incremented"); region.setCoprocessorHost(null); } @@ -502,7 +492,7 @@ public void testFlushSizeAccounting() throws Exception { public Object run() throws Exception { // Make sure it worked (above is sensitive to caching details in hadoop core) FileSystem fs = FileSystem.get(conf); - Assert.assertEquals(FaultyFileSystem.class, fs.getClass()); + assertEquals(FaultyFileSystem.class, fs.getClass()); FaultyFileSystem ffs = (FaultyFileSystem) fs; HRegion region = null; try { @@ -510,7 +500,7 @@ public Object run() throws Exception { region = initHRegion(tableName, null, null, CONF, false, Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES); long size = region.getMemStoreDataSize(); - Assert.assertEquals(0, size); + assertEquals(0, size); // Put one item into 
memstore. Measure the size of one item in memstore. Put p1 = new Put(row); p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[]) null)); @@ -520,7 +510,7 @@ public Object run() throws Exception { try { LOG.info("Flushing"); region.flush(true); - Assert.fail("Didn't bubble up IOE!"); + fail("Didn't bubble up IOE!"); } catch (DroppedSnapshotException dse) { // What we are expecting region.closing.set(false); // this is needed for the rest of the test to work @@ -528,7 +518,7 @@ public Object run() throws Exception { // Make it so all writes succeed from here on out ffs.fault.set(false); // Check sizes. Should still be the one entry. - Assert.assertEquals(sizeOfOnePut, region.getMemStoreDataSize()); + assertEquals(sizeOfOnePut, region.getMemStoreDataSize()); // Now add two entries so that on this next flush that fails, we can see if we // subtract the right amount, the snapshot size only. Put p2 = new Put(row); @@ -536,13 +526,13 @@ public Object run() throws Exception { p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual3, 3, (byte[]) null)); region.put(p2); long expectedSize = sizeOfOnePut * 3; - Assert.assertEquals(expectedSize, region.getMemStoreDataSize()); + assertEquals(expectedSize, region.getMemStoreDataSize()); // Do a successful flush. It will clear the snapshot only. Thats how flushes work. // If already a snapshot, we clear it else we move the memstore to be snapshot and flush // it region.flush(true); // Make sure our memory accounting is right. 
- Assert.assertEquals(sizeOfOnePut * 2, region.getMemStoreDataSize()); + assertEquals(sizeOfOnePut * 2, region.getMemStoreDataSize()); } finally { HBaseTestingUtil.closeRegionAndWAL(region); } @@ -566,7 +556,7 @@ public void testCloseWithFailingFlush() throws Exception { public Object run() throws Exception { // Make sure it worked (above is sensitive to caching details in hadoop core) FileSystem fs = FileSystem.get(conf); - Assert.assertEquals(FaultyFileSystem.class, fs.getClass()); + assertEquals(FaultyFileSystem.class, fs.getClass()); FaultyFileSystem ffs = (FaultyFileSystem) fs; HRegion region = null; try { @@ -574,7 +564,7 @@ public Object run() throws Exception { region = initHRegion(tableName, null, null, CONF, false, Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES); long size = region.getMemStoreDataSize(); - Assert.assertEquals(0, size); + assertEquals(0, size); // Put one item into memstore. Measure the size of one item in memstore. Put p1 = new Put(row); p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[]) null)); @@ -978,8 +968,8 @@ public void testRecoveredEditsReplayCompaction(boolean mismatchedRegionName) thr FileStatus[] files = CommonFSUtils.listStatus(fs, tmpDir); String errorMsg = "Expected to find 1 file in the region temp directory " + "from the compaction, could not find any"; - assertNotNull(errorMsg, files); - assertEquals(errorMsg, 1, files.length); + assertNotNull(files, errorMsg); + assertEquals(1, files.length, errorMsg); // move the file inside region dir Path newFile = region.getRegionFileSystem().commitStoreFile(Bytes.toString(family), files[0].getPath()); @@ -1031,7 +1021,7 @@ public void testRecoveredEditsReplayCompaction(boolean mismatchedRegionName) thr assertEquals(1, region.getStore(family).getStorefilesCount()); } files = CommonFSUtils.listStatus(fs, tmpDir); - assertTrue("Expected to find 0 files inside " + tmpDir, files == null || files.length == 0); + assertTrue(files == null || files.length == 0, "Expected to find 
0 files inside " + tmpDir); for (long i = minSeqId; i < maxSeqId; i++) { Get get = new Get(Bytes.toBytes(i)); @@ -1365,7 +1355,7 @@ public void testGetWhileRegionClose() throws IOException { } if (t.e != null) { LOG.info("Exception=" + t.e); - assertFalse("Found a NPE in " + t.getName(), t.e instanceof NullPointerException); + assertFalse(t.e instanceof NullPointerException, "Found a NPE in " + t.getName()); } } } @@ -1407,7 +1397,7 @@ public void run() { */ @Test public void testWeirdCacheBehaviour() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"), Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"), Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") }; this.region = initHRegion(tableName, method, CONF, FAMILIES); @@ -1423,29 +1413,29 @@ public void testWeirdCacheBehaviour() throws Exception { putRows(this.region, 3, value2, keyPrefix2); putRows(this.region, 3, value2, keyPrefix3); System.out.println("Checking values for key: " + keyPrefix1); - assertEquals("Got back incorrect number of rows from scan", 3, - getNumberOfRows(keyPrefix1, value2, this.region)); + assertEquals(3, getNumberOfRows(keyPrefix1, value2, this.region), + "Got back incorrect number of rows from scan"); System.out.println("Checking values for key: " + keyPrefix2); - assertEquals("Got back incorrect number of rows from scan", 3, - getNumberOfRows(keyPrefix2, value2, this.region)); + assertEquals(3, getNumberOfRows(keyPrefix2, value2, this.region), + "Got back incorrect number of rows from scan"); System.out.println("Checking values for key: " + keyPrefix3); - assertEquals("Got back incorrect number of rows from scan", 3, - getNumberOfRows(keyPrefix3, value2, this.region)); + assertEquals(3, getNumberOfRows(keyPrefix3, value2, this.region), + "Got back incorrect number of rows from scan"); deleteColumns(this.region, value2, 
keyPrefix1); deleteColumns(this.region, value2, keyPrefix2); deleteColumns(this.region, value2, keyPrefix3); System.out.println("Starting important checks....."); - assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1, 0, - getNumberOfRows(keyPrefix1, value2, this.region)); - assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2, 0, - getNumberOfRows(keyPrefix2, value2, this.region)); - assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3, 0, - getNumberOfRows(keyPrefix3, value2, this.region)); + assertEquals(0, getNumberOfRows(keyPrefix1, value2, this.region), + "Got back incorrect number of rows from scan: " + keyPrefix1); + assertEquals(0, getNumberOfRows(keyPrefix2, value2, this.region), + "Got back incorrect number of rows from scan: " + keyPrefix2); + assertEquals(0, getNumberOfRows(keyPrefix3, value2, this.region), + "Got back incorrect number of rows from scan: " + keyPrefix3); } @Test public void testAppendWithReadOnlyTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily")); boolean exceptionCaught = false; Append append = new Append(Bytes.toBytes("somerow")); @@ -1462,7 +1452,7 @@ public void testAppendWithReadOnlyTable() throws Exception { @Test public void testIncrWithReadOnlyTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily")); boolean exceptionCaught = false; Increment inc = new Increment(Bytes.toBytes("somerow")); @@ -1494,7 +1484,7 @@ private void deleteColumns(HRegion r, String value, String keyPrefix) throws IOE results.clear(); } while (more); } - assertEquals("Did not perform correct number of deletes", 3, count); + 
assertEquals(3, count, "Did not perform correct number of deletes"); } private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception { @@ -1734,8 +1724,7 @@ public void doWork() throws IOException { // 3. Exception thrown in validation LOG.info("Next a batch put with one invalid family"); puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value); - thrown.expect(NoSuchColumnFamilyException.class); - this.region.batchMutate(puts, true); + assertThrows(NoSuchColumnFamilyException.class, () -> this.region.batchMutate(puts, true)); } @Test @@ -1913,14 +1902,14 @@ public void testCheckAndMutate_WithCorrectValue() throws IOException { // checkAndPut with correct value boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1), put); - assertEquals("First", true, res); + assertEquals(true, res, "First"); // checkAndDelete with correct value Delete delete = new Delete(row1, now + 1); delete.addColumn(fam1, qf1); res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1), delete); - assertEquals("Delete", true, res); + assertEquals(true, res, "Delete"); // Putting data in key put = new Put(row1); @@ -1930,14 +1919,14 @@ public void testCheckAndMutate_WithCorrectValue() throws IOException { // checkAndPut with correct value res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BigDecimalComparator(bd1), put); - assertEquals("Second put", true, res); + assertEquals(true, res, "Second put"); // checkAndDelete with correct value delete = new Delete(row1, now + 3); delete.addColumn(fam1, qf1); res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BigDecimalComparator(bd1), delete); - assertEquals("Second delete", true, res); + assertEquals(true, res, "Second delete"); } @Test @@ -2499,14 +2488,14 @@ public void testCheckAndMutateWithCorrectValue() throws IOException { // checkAndPut with correct value CheckAndMutateResult res = 
region.checkAndMutate( CheckAndMutate.newBuilder(row1).ifMatches(fam1, qf1, CompareOperator.EQUAL, val1).build(put)); - assertTrue("First", res.isSuccess()); + assertTrue(res.isSuccess(), "First"); // checkAndDelete with correct value Delete delete = new Delete(row1, now + 1); delete.addColumn(fam1, qf1); res = region.checkAndMutate(CheckAndMutate.newBuilder(row1) .ifMatches(fam1, qf1, CompareOperator.EQUAL, val1).build(delete)); - assertTrue("Delete", res.isSuccess()); + assertTrue(res.isSuccess(), "Delete"); assertNull(res.getResult()); // Putting data in key @@ -2517,7 +2506,7 @@ public void testCheckAndMutateWithCorrectValue() throws IOException { // checkAndPut with correct value res = region.checkAndMutate(CheckAndMutate.newBuilder(row1) .ifMatches(fam1, qf1, CompareOperator.EQUAL, Bytes.toBytes(bd1)).build(put)); - assertTrue("Second put", res.isSuccess()); + assertTrue(res.isSuccess(), "Second put"); assertNull(res.getResult()); // checkAndDelete with correct value @@ -2525,7 +2514,7 @@ public void testCheckAndMutateWithCorrectValue() throws IOException { delete.addColumn(fam1, qf1); res = region.checkAndMutate(CheckAndMutate.newBuilder(row1) .ifMatches(fam1, qf1, CompareOperator.EQUAL, Bytes.toBytes(bd1)).build(delete)); - assertTrue("Second delete", res.isSuccess()); + assertTrue(res.isSuccess(), "Second delete"); assertNull(res.getResult()); } @@ -3148,9 +3137,9 @@ public void testDelete_CheckFamily() throws IOException { // testing non existing family NavigableMap> deleteMap2 = new TreeMap<>(Bytes.BYTES_COMPARATOR); deleteMap2.put(fam4, kvs); - assertThrows("Family " + Bytes.toString(fam4) + " does exist", - NoSuchColumnFamilyException.class, - () -> region.delete(new Delete(forUnitTestsOnly, HConstants.LATEST_TIMESTAMP, deleteMap2))); + assertThrows(NoSuchColumnFamilyException.class, + () -> region.delete(new Delete(forUnitTestsOnly, HConstants.LATEST_TIMESTAMP, deleteMap2)), + "Family " + Bytes.toString(fam4) + " does exist"); } @Test @@ -3271,8 
+3260,8 @@ public void testPutWithLatestTS() throws IOException { assertEquals(1, result.size()); Cell kv = result.rawCells()[0]; LOG.info("Got: " + kv); - assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp", - kv.getTimestamp() != HConstants.LATEST_TIMESTAMP); + assertTrue(kv.getTimestamp() != HConstants.LATEST_TIMESTAMP, + "LATEST_TIMESTAMP was not replaced with real timestamp"); // Check same with WAL enabled (historically these took different // code paths, so check both) @@ -3287,8 +3276,8 @@ public void testPutWithLatestTS() throws IOException { assertEquals(1, result.size()); kv = result.rawCells()[0]; LOG.info("Got: " + kv); - assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp", - kv.getTimestamp() != HConstants.LATEST_TIMESTAMP); + assertTrue(kv.getTimestamp() != HConstants.LATEST_TIMESTAMP, + "LATEST_TIMESTAMP was not replaced with real timestamp"); } /** @@ -3315,7 +3304,7 @@ public void testPutWithTsSlop() throws IOException { LOG.debug("Received expected exception", ioe); caughtExcep = true; } - assertTrue("Should catch FailedSanityCheckException", caughtExcep); + assertTrue(caughtExcep, "Should catch FailedSanityCheckException"); } @Test @@ -3434,8 +3423,8 @@ public Void answer(InvocationOnMock invocation) throws Throwable { region.put(originalPut); region.setCoprocessorHost(normalCPHost); final long finalSize = region.getDataInMemoryWithoutWAL(); - assertEquals("finalSize:" + finalSize + ", initSize:" + initSize + ", delta:" + delta, - finalSize, initSize + delta); + assertEquals(finalSize, initSize + delta, + "finalSize:" + finalSize + ", initSize:" + initSize + ", delta:" + delta); } @Test @@ -3779,13 +3768,13 @@ public void testRegionScanner_getFilesRead() throws IOException { expectedFilePaths.add(fs.makeQualified(storeFile.getPath())); } } - assertTrue("Should have at least one store file after flush", expectedFilePaths.size() >= 1); + assertTrue(expectedFilePaths.size() >= 1, "Should have at least one store file 
after flush"); // Get region scanner; before close getFilesRead must be empty. RegionScannerImpl scanner = region.getScanner(new Scan()); Set filesReadBeforeClose = scanner.getFilesRead(); - assertTrue("Should return empty set before closing", filesReadBeforeClose.isEmpty()); + assertTrue(filesReadBeforeClose.isEmpty(), "Should return empty set before closing"); // Drain scanner (next up to two rows) to exercise store heap reads. List cells = new ArrayList<>(); @@ -3800,15 +3789,15 @@ public void testRegionScanner_getFilesRead() throws IOException { // Still before close: set must remain empty until scanner is closed. filesReadBeforeClose = scanner.getFilesRead(); - assertTrue("Should return empty set before closing even after scanning", - filesReadBeforeClose.isEmpty()); + assertTrue(filesReadBeforeClose.isEmpty(), + "Should return empty set before closing even after scanning"); scanner.close(); // After close: set must contain exactly the expected store file paths. Set filesReadAfterClose = scanner.getFilesRead(); - assertEquals("Should have exact file count after closing", expectedFilePaths.size(), - filesReadAfterClose.size()); - assertEquals("Should contain all expected file paths", expectedFilePaths, filesReadAfterClose); + assertEquals(expectedFilePaths.size(), filesReadAfterClose.size(), + "Should have exact file count after closing"); + assertEquals(expectedFilePaths, filesReadAfterClose, "Should contain all expected file paths"); } @Test @@ -4336,9 +4325,9 @@ public void testScanner_JoinedScanners() throws IOException { assertTrue(s.next(results)); assertEquals(3, results.size()); - assertTrue("orderCheck", CellUtil.matchingFamily(results.get(0), cf_alpha)); - assertTrue("orderCheck", CellUtil.matchingFamily(results.get(1), cf_essential)); - assertTrue("orderCheck", CellUtil.matchingFamily(results.get(2), cf_joined)); + assertTrue(CellUtil.matchingFamily(results.get(0), cf_alpha), "orderCheck"); + assertTrue(CellUtil.matchingFamily(results.get(1), 
cf_essential), "orderCheck"); + assertTrue(CellUtil.matchingFamily(results.get(2), cf_joined), "orderCheck"); results.clear(); assertFalse(s.next(results)); @@ -4530,9 +4519,8 @@ public void testFlushCacheWhileScanning() throws IOException, InterruptedExcepti if (!toggle) { flushThread.flush(); } - assertEquals( - "toggle=" + toggle + "i=" + i + " ts=" + EnvironmentEdgeManager.currentTime(), - expectedCount, res.size()); + assertEquals(expectedCount, res.size(), + "toggle=" + toggle + "i=" + i + " ts=" + EnvironmentEdgeManager.currentTime()); toggle = !toggle; } } @@ -4678,10 +4666,10 @@ public void testWritesWhileScanning() throws IOException, InterruptedException { } while (moreRows); } if (!res.isEmpty() || !previousEmpty || i > compactInterval) { - assertEquals("i=" + i, expectedCount, res.size()); + assertEquals(expectedCount, res.size(), "i=" + i); long timestamp = res.get(0).getTimestamp(); - assertTrue("Timestamps were broke: " + timestamp + " prev: " + prevTimestamp, - timestamp >= prevTimestamp); + assertTrue(timestamp >= prevTimestamp, + "Timestamps were broke: " + timestamp + " prev: " + prevTimestamp); prevTimestamp = timestamp; } } @@ -4888,7 +4876,7 @@ public void doAnAction() throws Exception { boolean previousEmpty = result == null || result.isEmpty(); result = region.get(get); if (!result.isEmpty() || !previousEmpty || i > compactInterval) { - assertEquals("i=" + i, expectedCount, result.size()); + assertEquals(expectedCount, result.size(), "i=" + i); // TODO this was removed, now what dangit?! // search looking for the qualifier in question? 
long timestamp = 0; @@ -5047,7 +5035,7 @@ public void testBloomFilterSize() throws IOException { @Test public void testAllColumnsWithBloomFilter() throws IOException { - byte[] TABLE = Bytes.toBytes(name.getMethodName()); + byte[] TABLE = Bytes.toBytes(name); byte[] FAMILY = Bytes.toBytes("family"); // Create table @@ -5174,7 +5162,7 @@ public void testgetHDFSBlocksDistribution() throws Exception { String msg = "uniqueBlocksWeight=" + uniqueBlocksWeight1 + ", topHostWeight=" + topHostWeight + ", topHost=" + topHost + "; " + sb.toString(); LOG.info(msg); - assertTrue(msg, uniqueBlocksWeight1 == topHostWeight); + assertTrue(uniqueBlocksWeight1 == topHostWeight, msg); // use the static method to compute the value, it should be the same. // static method is used by load balancer or other components @@ -5219,8 +5207,8 @@ public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() th !(monitoredTask instanceof MonitoredRPCHandler) && monitoredTask.getDescription().contains(region.toString()) ) { - assertTrue("Region state should be ABORTED.", - monitoredTask.getState().equals(MonitoredTask.State.ABORTED)); + assertTrue(monitoredTask.getState().equals(MonitoredTask.State.ABORTED), + "Region state should be ABORTED."); break; } } @@ -5236,7 +5224,7 @@ public void testRegionInfoFileCreation() throws IOException { Path rootDir = new Path(dir + "testRegionInfoFileCreation"); TableDescriptorBuilder tableDescriptorBuilder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)); ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); @@ -5253,8 +5241,8 @@ public void testRegionInfoFileCreation() throws IOException { Path regionInfoFile = new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE); // Verify that the .regioninfo file is present - 
assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir", - fs.exists(regionInfoFile)); + assertTrue(fs.exists(regionInfoFile), + HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir"); // Try to open the region region = HRegion.openHRegion(rootDir, hri, tableDescriptor, null, CONF); @@ -5262,13 +5250,13 @@ public void testRegionInfoFileCreation() throws IOException { HBaseTestingUtil.closeRegionAndWAL(region); // Verify that the .regioninfo file is still there - assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir", - fs.exists(regionInfoFile)); + assertTrue(fs.exists(regionInfoFile), + HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir"); // Remove the .regioninfo file and verify is recreated on region open fs.delete(regionInfoFile, true); - assertFalse(HRegionFileSystem.REGION_INFO_FILE + " should be removed from the region dir", - fs.exists(regionInfoFile)); + assertFalse(fs.exists(regionInfoFile), + HRegionFileSystem.REGION_INFO_FILE + " should be removed from the region dir"); region = HRegion.openHRegion(rootDir, hri, tableDescriptor, null, CONF); // region = TEST_UTIL.openHRegion(hri, htd); @@ -5276,8 +5264,8 @@ public void testRegionInfoFileCreation() throws IOException { HBaseTestingUtil.closeRegionAndWAL(region); // Verify that the .regioninfo file is still there - assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir", - fs.exists(new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE))); + assertTrue(fs.exists(new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE)), + HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir"); region = null; } @@ -5523,7 +5511,7 @@ public void testPutWithMemStoreFlush() throws Exception { * not work for {@link FSHLog} also. 
*/ @Test - @Ignore + @Disabled public void testDurability() throws Exception { // there are 5 x 5 cases: // table durability(SYNC,FSYNC,ASYC,SKIP,USE_DEFAULT) x mutation @@ -5630,14 +5618,13 @@ public boolean evaluate() throws Exception { public void testRegionReplicaSecondary() throws IOException { // create a primary region, load some data and flush // create a secondary region, and do a get against that - Path rootDir = new Path(dir + name.getMethodName()); + Path rootDir = new Path(dir + name); CommonFSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir); byte[][] families = new byte[][] { Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") }; byte[] cq = Bytes.toBytes("cq"); - TableDescriptorBuilder builder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)); for (byte[] family : families) { builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } @@ -5678,14 +5665,13 @@ public void testRegionReplicaSecondary() throws IOException { public void testRegionReplicaSecondaryIsReadOnly() throws IOException { // create a primary region, load some data and flush // create a secondary region, and do a put against that - Path rootDir = new Path(dir + name.getMethodName()); + Path rootDir = new Path(dir + name); CommonFSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir); byte[][] families = new byte[][] { Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") }; byte[] cq = Bytes.toBytes("cq"); - TableDescriptorBuilder builder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)); for (byte[] family : families) { builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } @@ -5735,14 +5721,13 @@ static WALFactory createWALFactory(Configuration conf, Path rootDir) throws IOEx @Test 
public void testCompactionFromPrimary() throws IOException { - Path rootDir = new Path(dir + name.getMethodName()); + Path rootDir = new Path(dir + name); CommonFSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir); byte[][] families = new byte[][] { Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") }; byte[] cq = Bytes.toBytes("cq"); - TableDescriptorBuilder builder = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())); + TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)); for (byte[] family : families) { builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); } @@ -5779,7 +5764,7 @@ public void testCompactionFromPrimary() throws IOException { .withRegionFileSystem(regionFs).build()); sft.removeStoreFiles(storeFiles.stream().collect(Collectors.toList())); Collection storeFileInfos = sft.load(); - Assert.assertTrue(storeFileInfos == null || storeFileInfos.isEmpty()); + assertTrue(storeFileInfos == null || storeFileInfos.isEmpty()); verifyData(secondaryRegion, 0, 1000, cq, families); } finally { @@ -5968,15 +5953,15 @@ protected HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopK */ private void checkOneCell(Cell kv, byte[] cf, int rowIdx, int colIdx, long ts) { String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts; - assertEquals("Row mismatch which checking: " + ctx, "row:" + rowIdx, - Bytes.toString(CellUtil.cloneRow(kv))); - assertEquals("ColumnFamily mismatch while checking: " + ctx, Bytes.toString(cf), - Bytes.toString(CellUtil.cloneFamily(kv))); - assertEquals("Column qualifier mismatch while checking: " + ctx, "column:" + colIdx, - Bytes.toString(CellUtil.cloneQualifier(kv))); - assertEquals("Timestamp mismatch while checking: " + ctx, ts, kv.getTimestamp()); - assertEquals("Value mismatch while checking: " + ctx, "value-version-" + ts, - Bytes.toString(CellUtil.cloneValue(kv))); + assertEquals("row:" + rowIdx, 
Bytes.toString(CellUtil.cloneRow(kv)), + "Row mismatch while checking: " + ctx); + assertEquals(Bytes.toString(cf), Bytes.toString(CellUtil.cloneFamily(kv)), + "ColumnFamily mismatch while checking: " + ctx); + assertEquals("column:" + colIdx, Bytes.toString(CellUtil.cloneQualifier(kv)), + "Column qualifier mismatch while checking: " + ctx); + assertEquals(ts, kv.getTimestamp(), "Timestamp mismatch while checking: " + ctx); + assertEquals("value-version-" + ts, Bytes.toString(CellUtil.cloneValue(kv)), + "Value mismatch while checking: " + ctx); } @Test @@ -6592,8 +6577,8 @@ public void testReverseScanShouldNotScanMemstoreIfReadPtLesser() throws Exceptio if (!assertDone) { StoreScanner current = (StoreScanner) (scanner.storeHeap).getCurrentForTesting(); List scanners = current.getAllScannersForTesting(); - assertEquals("There should be only one scanner the store file scanner", 1, - scanners.size()); + assertEquals(1, scanners.size(), + "There should be only one scanner the store file scanner"); assertDone = true; } } while (hasNext); @@ -6650,29 +6635,29 @@ public void testWriteRequestsCounter() throws IOException { byte[][] families = { fam }; this.region = initHRegion(tableName, method, CONF, families); - Assert.assertEquals(0L, region.getWriteRequestsCount()); + assertEquals(0L, region.getWriteRequestsCount()); Put put = new Put(row); put.addColumn(fam, fam, fam); - Assert.assertEquals(0L, region.getWriteRequestsCount()); + assertEquals(0L, region.getWriteRequestsCount()); region.put(put); - Assert.assertEquals(1L, region.getWriteRequestsCount()); + assertEquals(1L, region.getWriteRequestsCount()); region.put(put); - Assert.assertEquals(2L, region.getWriteRequestsCount()); + assertEquals(2L, region.getWriteRequestsCount()); region.put(put); - Assert.assertEquals(3L, region.getWriteRequestsCount()); + assertEquals(3L, region.getWriteRequestsCount()); region.delete(new Delete(row)); - Assert.assertEquals(4L,
region.getWriteRequestsCount()); } @Test public void testOpenRegionWrittenToWAL() throws Exception { - final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42); + final ServerName serverName = ServerName.valueOf(name, 100, 42); final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName)); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam2)).build(); RegionInfo hri = RegionInfoBuilder.newBuilder(htd.getTableName()).build(); @@ -6744,7 +6729,7 @@ protected long getNextSequenceId(WAL wal) throws IOException { @Test public void testFlushedFileWithNoTags() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build(); @@ -6758,8 +6743,8 @@ public void testFlushedFileWithNoTags() throws Exception { HStore store = region.getStore(fam1); Collection storefiles = store.getStorefiles(); for (HStoreFile sf : storefiles) { - assertFalse("Tags should not be present ", - sf.getReader().getHFileReader().getFileContext().isIncludesTags()); + assertFalse(sf.getReader().getHFileReader().getFileContext().isIncludesTags(), + "Tags should not be present "); } } @@ -6796,13 +6781,13 @@ public Long answer(InvocationOnMock invocation) throws Throwable { @Test public void testCloseRegionWrittenToWAL() throws Exception { - Path rootDir = new Path(dir + name.getMethodName()); + Path rootDir = new Path(dir + name); CommonFSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir); final ServerName serverName = 
ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42); final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName)); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam2)).build(); RegionInfo hri = RegionInfoBuilder.newBuilder(htd.getTableName()).build(); @@ -6915,10 +6900,9 @@ public void testCellTTLs() throws IOException { final byte[] q4 = Bytes.toBytes("q4"); // 10 seconds - TableDescriptor tableDescriptor = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam1).setTimeToLive(10).build()) - .build(); + TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam1).setTimeToLive(10).build()) + .build(); Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS); @@ -7044,7 +7028,7 @@ public void testTTLsUsingSmallHeartBeatCells() throws IOException { // 10 seconds int ttlSecs = 10; TableDescriptor tableDescriptor = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).setColumnFamily( + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(fam1).setTimeToLive(ttlSecs).build()).build(); Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); @@ -7577,18 +7561,17 @@ public void testMutateRow_WriteRequestCount() throws Exception { long wrcBeforeMutate = this.region.writeRequestsCount.longValue(); this.region.mutateRow(rm); long wrcAfterMutate = this.region.writeRequestsCount.longValue(); - Assert.assertEquals(wrcBeforeMutate + 
rm.getMutations().size(), wrcAfterMutate); + assertEquals(wrcBeforeMutate + rm.getMutations().size(), wrcAfterMutate); } @Test public void testBulkLoadReplicationEnabled() throws IOException { TEST_UTIL.getConfiguration().setBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, true); - final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42); + final ServerName serverName = ServerName.valueOf(name, 100, 42); final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName)); - TableDescriptor tableDescriptor = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)).build(); + TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1)).build(); RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build(); TEST_UTIL.createRegionDir(hri); region = HRegion.openHRegion(hri, tableDescriptor, rss.getWAL(hri), @@ -7721,7 +7704,7 @@ public void run() { region = null; holder.join(); - assertFalse("Region lock holder should not have been interrupted", holderInterrupted.get()); + assertFalse(holderInterrupted.get(), "Region lock holder should not have been interrupted"); } @Test @@ -7770,7 +7753,7 @@ public void run() { region = null; holder.join(); - assertTrue("Region lock holder was not interrupted", holderInterrupted.get()); + assertTrue(holderInterrupted.get(), "Region lock holder was not interrupted"); } @Test @@ -7889,7 +7872,7 @@ public void run() { region = null; holder.join(); - assertFalse("Region lock holder should not have been interrupted", holderInterrupted.get()); + assertFalse(holderInterrupted.get(), "Region lock holder should not have been interrupted"); } @Test @@ -7987,14 +7970,14 @@ public void testHRegionInitializeFailsWithDeletedRegionDir() throws Exception { // Verify region directory exists Path regionDir = new 
Path(tableDir, regionInfo.getEncodedName()); - assertTrue("Region directory should exist after creation", fs.exists(regionDir)); + assertTrue(fs.exists(regionDir), "Region directory should exist after creation"); Path regionInfoFile = new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE); - assertTrue("Region info file should exist after creation", fs.exists(regionInfoFile)); + assertTrue(fs.exists(regionInfoFile), "Region info file should exist after creation"); // Delete the region directory (simulating external deletion or corruption) assertTrue(fs.delete(regionDir, true)); - assertFalse("Region directory should not exist after deletion", fs.exists(regionDir)); + assertFalse(fs.exists(regionDir), "Region directory should not exist after deletion"); // Try to open/initialize the region again - this should fail LOG.info("Attempting to re-initialize region with deleted directory - should fail"); @@ -8010,11 +7993,11 @@ public void testHRegionInitializeFailsWithDeletedRegionDir() throws Exception { } // Verify the exception is related to missing parent directory - assertNotNull("Exception should be thrown", regionInitializeException); + assertNotNull(regionInitializeException, "Exception should be thrown"); String exceptionMessage = regionInitializeException.getMessage().toLowerCase(); assertTrue(exceptionMessage.contains("region directory does not exist")); - assertFalse("Region directory should still not exist after failed initialization", - fs.exists(regionDir)); + assertFalse(fs.exists(regionDir), + "Region directory should still not exist after failed initialization"); } finally { if (wal != null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java index db2e55aaf5d8..ce4e39541b2d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.URI; @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -52,21 +51,17 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.util.Progressable; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestHRegionFileSystem { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionFileSystem.class); - private static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final Logger LOG = 
LoggerFactory.getLogger(TestHRegionFileSystem.class); @@ -74,9 +69,12 @@ public class TestHRegionFileSystem { private static final byte[][] FAMILIES = { Bytes.add(FAMILY_NAME, Bytes.toBytes("-A")), Bytes.add(FAMILY_NAME, Bytes.toBytes("-B")) }; private static final TableName TABLE_NAME = TableName.valueOf("TestTable"); + private String name; - @Rule - public TestName name = new TestName(); + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } @Test public void testBlockStoragePolicy() throws Exception { @@ -84,7 +82,7 @@ public void testBlockStoragePolicy() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); TEST_UTIL.startMiniCluster(); Table table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES); - assertEquals("Should start with empty table", 0, TEST_UTIL.countRows(table)); + assertEquals(0, TEST_UTIL.countRows(table), "Should start with empty table"); HRegionFileSystem regionFs = getHRegionFS(TEST_UTIL.getConnection(), table, conf); // the original block storage policy would be HOT String spA = regionFs.getStoragePolicyName(Bytes.toString(FAMILIES[0])); @@ -200,18 +198,18 @@ private HRegionFileSystem getHRegionFS(Connection conn, Table table, Configurati @Test public void testOnDiskRegionCreation() throws IOException { - Path rootDir = TEST_UTIL.getDataTestDirOnTestFS(name.getMethodName()); + Path rootDir = TEST_UTIL.getDataTestDirOnTestFS(name); FileSystem fs = TEST_UTIL.getTestFileSystem(); Configuration conf = TEST_UTIL.getConfiguration(); // Create a Region - RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name)).build(); HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, CommonFSUtils.getTableDir(rootDir, hri.getTable()), hri); // Verify if the region is on disk Path regionDir = regionFs.getRegionDir(); - assertTrue("The region 
folder should be created", fs.exists(regionDir)); + assertTrue(fs.exists(regionDir), "The region folder should be created"); // Verify the .regioninfo RegionInfo hriVerify = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir); @@ -225,33 +223,33 @@ public void testOnDiskRegionCreation() throws IOException { // Delete the region HRegionFileSystem.deleteRegionFromFileSystem(conf, fs, CommonFSUtils.getTableDir(rootDir, hri.getTable()), hri); - assertFalse("The region folder should be removed", fs.exists(regionDir)); + assertFalse(fs.exists(regionDir), "The region folder should be removed"); fs.delete(rootDir, true); } @Test public void testNonIdempotentOpsWithRetries() throws IOException { - Path rootDir = TEST_UTIL.getDataTestDirOnTestFS(name.getMethodName()); + Path rootDir = TEST_UTIL.getDataTestDirOnTestFS(name); FileSystem fs = TEST_UTIL.getTestFileSystem(); Configuration conf = TEST_UTIL.getConfiguration(); // Create a Region - RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name)).build(); HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, rootDir, hri); assertTrue(fs.exists(regionFs.getRegionDir())); regionFs = new HRegionFileSystem(conf, new MockFileSystemForCreate(), rootDir, hri); boolean result = regionFs.createDir(new Path("/foo/bar")); - assertTrue("Couldn't create the directory", result); + assertTrue(result, "Couldn't create the directory"); regionFs = new HRegionFileSystem(conf, new MockFileSystem(), rootDir, hri); result = regionFs.rename(new Path("/foo/bar"), new Path("/foo/bar2")); - assertTrue("Couldn't rename the directory", result); + assertTrue(result, "Couldn't rename the directory"); regionFs = new HRegionFileSystem(conf, new MockFileSystem(), rootDir, hri); result = regionFs.deleteDir(new Path("/foo/bar")); - assertTrue("Couldn't delete the directory", result); + assertTrue(result, "Couldn't 
delete the directory"); fs.delete(rootDir, true); } @@ -359,7 +357,7 @@ public void testTempAndCommit() throws IOException { // Create a Region String familyName = "cf"; - RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name)).build(); HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, rootDir, hri); StoreContext storeContext = StoreContext.getBuilder() .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName)) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java index 63d12835d5d2..5b0e7e3a1fed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java @@ -17,14 +17,13 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; import org.apache.hadoop.hbase.TableName; @@ -43,11 +42,10 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; 
-import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,18 +53,18 @@ * Tests that need to spin up a cluster testing an {@link HRegion}. Use {@link TestHRegion} if you * don't need a cluster, if you can test w/ a standalone {@link HRegion}. */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestHRegionOnCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionOnCluster.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHRegionOnCluster.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + private String name; - @Rule - public TestName name = new TestName(); + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } @Test public void testDataCorrectnessReplayingRecoveredEdits() throws Exception { @@ -75,7 +73,7 @@ public void testDataCorrectnessReplayingRecoveredEdits() throws Exception { TEST_UTIL.startMiniCluster(NUM_RS); try { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); final byte[] FAMILY = Bytes.toBytes("family"); SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster master = cluster.getMaster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index c077bc74b62b..69db14c3a38e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java 
@@ -20,12 +20,12 @@ import static org.apache.hadoop.hbase.regionserver.TestHRegion.assertGet; import static org.apache.hadoop.hbase.regionserver.TestHRegion.putData; import static org.apache.hadoop.hbase.regionserver.TestHRegion.verifyData; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -85,15 +84,13 @@ import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay; import org.apache.hadoop.hbase.wal.WALStreamReader; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; 
+import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -117,16 +114,11 @@ * region replicas */ @SuppressWarnings("deprecation") -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestHRegionReplayEvents { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionReplayEvents.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHRegionReplayEvents.class); - @Rule - public TestName name = new TestName(); + private String name; private static HBaseTestingUtil TEST_UTIL; @@ -152,28 +144,29 @@ public class TestHRegionReplayEvents { private WAL walPrimary, walSecondary; private WALStreamReader reader; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL = new HBaseTestingUtil(); TEST_UTIL.startMiniDFSCluster(1); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { LOG.info("Cleaning test directory: " + TEST_UTIL.getDataTestDir()); TEST_UTIL.cleanupTestDir(); TEST_UTIL.shutdownMiniDFSCluster(); } - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); CONF = TEST_UTIL.getConfiguration(); dir = TEST_UTIL.getDataTestDir("TestHRegionReplayEvents").toString(); - method = name.getMethodName(); - tableName = Bytes.toBytes(name.getMethodName()); + method = name; + tableName = Bytes.toBytes(name); rootDir = new Path(dir + method); TEST_UTIL.getConfiguration().set(HConstants.HBASE_DIR, rootDir.toString()); - method = name.getMethodName(); + method = name; TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(method)); for (byte[] family : families) { builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)); @@ -218,7 +211,7 @@ public void setUp() throws 
Exception { reader = null; } - @After + @AfterEach public void tearDown() throws Exception { if (reader != null) { reader.close(); @@ -235,7 +228,7 @@ public void tearDown() throws Exception { } String getName() { - return name.getMethodName(); + return name; } // Some of the test cases are as follows: @@ -1116,7 +1109,7 @@ public void testReplayFlushSeqIds() throws IOException { public void testSeqIdsFromReplay() throws IOException { // test the case where seqId's coming from replayed WALEdits are made persisted with their // original seqIds and they are made visible through mvcc read point upon replay - String method = name.getMethodName(); + String method = name; byte[] tableName = Bytes.toBytes(method); byte[] family = Bytes.toBytes("family"); @@ -1557,7 +1550,7 @@ public void testReplayBulkLoadEvent() throws IOException { storeFileNames.remove(sf.getPath().getName()); } } - assertTrue("Found some store file isn't loaded:" + storeFileNames, storeFileNames.isEmpty()); + assertTrue(storeFileNames.isEmpty(), "Found some store file isn't loaded:" + storeFileNames); LOG.info("-- Verifying edits from secondary"); for (byte[] family : families) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index 4206036df57d..4f8b983044e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -24,20 +24,20 @@ import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Stream; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.KeyValue; @@ -78,13 +78,10 @@ import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -94,14 +91,11 @@ * Tests bulk loading of HFiles and shows the atomicity or lack of atomicity of the region server's * bullkLoad functionality. 
*/ -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: duration={0}") public class TestHRegionServerBulkLoad { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionServerBulkLoad.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); protected static HBaseTestingUtil UTIL = new HBaseTestingUtil(); protected final static Configuration conf = UTIL.getConfiguration(); @@ -118,21 +112,20 @@ public class TestHRegionServerBulkLoad { } } - @Parameters - public static final Collection parameters() { + public static final Stream parameters() { int[] sleepDurations = new int[] { 0, 30000 }; - List configurations = new ArrayList<>(); + List configurations = new ArrayList<>(); for (int i : sleepDurations) { - configurations.add(new Object[] { i }); + configurations.add(Arguments.of(i)); } - return configurations; + return configurations.stream(); } public TestHRegionServerBulkLoad(int duration) { this.sleepDuration = duration; } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { conf.setInt("hbase.rpc.timeout", 10 * 1000); } @@ -322,7 +315,7 @@ public void setupTable(TableName table, int cfs) throws IOException { /** * Atomic bulk load. 
*/ - @Test + @TestTemplate public void testAtomicBulkLoad() throws Exception { TableName TABLE_NAME = TableName.valueOf("atomicBulkLoad"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java index 452173df0517..9ec3206296a8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java @@ -17,17 +17,16 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule; +import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.TableNameTestRule; +import org.apache.hadoop.hbase.TableNameTestExtension; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.CheckAndMutate; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -46,23 +45,19 @@ import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.RegisterExtension; import org.apache.hbase.thirdparty.com.google.common.io.Closeables; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestHRegionTracing { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionTracing.class); - private static HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static byte[] FAMILY = Bytes.toBytes("family"); @@ -73,23 +68,23 @@ public class TestHRegionTracing { private static byte[] VALUE = Bytes.toBytes("value"); - @Rule - public final OpenTelemetryRule traceRule = OpenTelemetryRule.create(); + @RegisterExtension + public static final OpenTelemetryExtension traceRule = OpenTelemetryExtension.create(); - @Rule - public final TableNameTestRule tableNameRule = new TableNameTestRule(); + @RegisterExtension + public final TableNameTestExtension tableNameRule = new TableNameTestExtension(); private WAL wal; private HRegion region; - @AfterClass + @AfterAll public static void tearDownAfterClass() throws IOException { UTIL.cleanupTestDir(); } - @Before - public void setUp() throws IOException { + @BeforeEach + public void setUp() throws Throwable { TableName tableName = tableNameRule.getTableName(); TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); @@ -102,7 +97,7 @@ public void setUp() throws IOException { region = UTIL.createLocalHRegion(info, desc); } - @After + @AfterEach public void tearDown() throws IOException { if (region != null) { region.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java index 27bdae43857f..2abdf1c5a1d2 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -30,21 +29,17 @@ import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * A test similar to TestHRegion, but with in-memory flush families. Also checks wal truncation * after in-memory compaction. */ -@Category({ VerySlowRegionServerTests.class, LargeTests.class }) +@Tag(VerySlowRegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestHRegionWithInMemoryFlush extends TestHRegion { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHRegionWithInMemoryFlush.class); - /** * @return A region on which you must call {@link HBaseTestingUtil#closeRegionAndWAL(HRegion)} * when done. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java index 179297bd873f..740ed48c5c37 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java @@ -24,12 +24,12 @@ import static org.apache.hadoop.hbase.io.hfile.CacheConfig.DEFAULT_EVICT_ON_CLOSE; import static org.apache.hadoop.hbase.io.hfile.CacheConfig.EVICT_BLOCKS_ON_CLOSE_KEY; import static org.apache.hadoop.hbase.regionserver.DefaultStoreEngine.DEFAULT_COMPACTION_POLICY_CLASS_KEY; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -83,7 +83,6 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -137,14 +136,13 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.util.Progressable; -import org.junit.After; 
-import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.Timeout; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -154,15 +152,12 @@ /** * Test class for the HStore */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestHStore { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHStore.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHStore.class); - @Rule - public TestName name = new TestName(); + private String name; HRegion region; HStore store; @@ -189,8 +184,9 @@ public class TestHStore { private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final String DIR = TEST_UTIL.getDataTestDir("TestStore").toString(); - @Before - public void setUp() throws IOException { + @BeforeEach + public void setUp(TestInfo testInfo) throws IOException { + this.name = testInfo.getTestMethod().get().getName(); qualifiers.clear(); qualifiers.add(qf1); qualifiers.add(qf3); @@ -269,13 +265,14 @@ private HStore init(String methodName, Configuration conf, TableDescriptorBuilde /** * Test we do not lose data if we fail a flush and then close. 
Part of HBase-10466 */ + @Test public void testFlushSizeSizing() throws Exception { - LOG.info("Setting up a faulty file system that cannot write in " + this.name.getMethodName()); + LOG.info("Setting up a faulty file system that cannot write in " + name); final Configuration conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration()); // Only retry once. conf.setInt("hbase.hstore.flush.retries.number", 1); - User user = User.createUserForTesting(conf, this.name.getMethodName(), new String[] { "foo" }); + User user = User.createUserForTesting(conf, name, new String[] { "foo" }); // Inject our faulty LocalFileSystem conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class); user.runAs(new PrivilegedExceptionAction() { @@ -287,7 +284,7 @@ public Object run() throws Exception { FaultyFileSystem ffs = (FaultyFileSystem) fs; // Initialize region - init(name.getMethodName(), conf); + init(name, conf); MemStoreSize mss = store.memstore.getFlushableSize(); assertEquals(0, mss.getDataSize()); @@ -386,7 +383,7 @@ private void writeAndRead(BloomType bloomType, int numStoreFiles) throws IOExcep ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family) .setCompressionType(Compression.Algorithm.GZ).setBloomFilterType(bloomType) .setConfiguration(BloomFilterUtil.PREFIX_LENGTH_KEY, "3").build(); - init(name.getMethodName(), conf, hcd); + init(name, conf, hcd); for (int i = 1; i <= numStoreFiles; i++) { byte[] row = Bytes.toBytes("row" + i); @@ -423,7 +420,7 @@ public void testCreateWriter() throws Exception { ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family).setCompressionType(Compression.Algorithm.GZ) .setDataBlockEncoding(DataBlockEncoding.DIFF).build(); - init(name.getMethodName(), conf, hcd); + init(name, conf, hcd); // Test createWriter StoreFileWriter writer = store.getStoreEngine() @@ -465,8 +462,8 @@ public void testDeleteExpiredStoreFiles(int minVersions) throws Exception { // Set the compaction threshold 
higher to avoid normal compactions. conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, 5); - init(name.getMethodName() + "-" + minVersions, conf, ColumnFamilyDescriptorBuilder - .newBuilder(family).setMinVersions(minVersions).setTimeToLive(ttl).build()); + init(name + "-" + minVersions, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + .setMinVersions(minVersions).setTimeToLive(ttl).build()); long storeTtl = this.store.getScanInfo().getTtl(); long sleepTime = storeTtl / storeFileNum; @@ -525,7 +522,7 @@ public void testLowestModificationTime() throws Exception { Configuration conf = HBaseConfiguration.create(); FileSystem fs = FileSystem.get(conf); // Initialize region - init(name.getMethodName(), conf); + init(name, conf); int storeFileNum = 4; for (int i = 1; i <= storeFileNum; i++) { @@ -581,7 +578,7 @@ private static long getLowestTimeStampFromFS(FileSystem fs, */ @Test public void testEmptyStoreFile() throws IOException { - init(this.name.getMethodName()); + init(name); // Write a store file. 
this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null); this.store.add(new KeyValue(row, family, qf2, 1, (byte[]) null), null); @@ -613,7 +610,7 @@ public void testEmptyStoreFile() throws IOException { */ @Test public void testGet_FromMemStoreOnly() throws IOException { - init(this.name.getMethodName()); + init(name); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null); @@ -638,7 +635,7 @@ public void testTimeRangeIfSomeCellsAreDroppedInFlush() throws IOException { } private void testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion) throws IOException { - init(this.name.getMethodName(), TEST_UTIL.getConfiguration(), + init(name, TEST_UTIL.getConfiguration(), ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(maxVersion).build()); long currentTs = 100; long minTs = currentTs; @@ -666,7 +663,7 @@ private void testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion) throws IO */ @Test public void testGet_FromFilesOnly() throws IOException { - init(this.name.getMethodName()); + init(name); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null); @@ -702,7 +699,7 @@ public void testGet_FromFilesOnly() throws IOException { */ @Test public void testGet_FromMemStoreAndFiles() throws IOException { - init(this.name.getMethodName()); + init(name); // Put data in memstore this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null); @@ -743,7 +740,7 @@ private void assertCheck() { } } - @After + @AfterEach public void tearDown() throws Exception { EnvironmentEdgeManagerTestHelper.reset(); if (store != null) { @@ -759,7 +756,7 @@ public void tearDown() throws Exception { } } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws IOException { TEST_UTIL.cleanupTestDir(); } @@ -780,7 +777,7 @@ public Object run() throws Exception { assertEquals(FaultyFileSystem.class, fs.getClass()); // Initialize region - init(name.getMethodName(), 
conf); + init(name, conf); LOG.info("Adding some data"); store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null); @@ -906,7 +903,7 @@ public void testMultipleTimestamps() throws IOException { long[] timestamps1 = new long[] { 1, 5, 10, 20 }; long[] timestamps2 = new long[] { 30, 80 }; - init(this.name.getMethodName()); + init(name); List kvList1 = getKeyValueSet(timestamps1, numRows, qf1, family); for (ExtendedCell kv : kvList1) { @@ -955,7 +952,7 @@ public void testMultipleTimestamps() throws IOException { */ @Test public void testSplitWithEmptyColFam() throws IOException { - init(this.name.getMethodName()); + init(name); assertFalse(store.getSplitPoint().isPresent()); } @@ -969,22 +966,20 @@ public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception { // a number we pass in is higher than some config value, inside compactionPolicy. Configuration conf = HBaseConfiguration.create(); conf.setLong(CONFIG_KEY, anyValue); - init(name.getMethodName() + "-xml", conf); + init(name + "-xml", conf); assertTrue(store.throttleCompaction(anyValue + 1)); assertFalse(store.throttleCompaction(anyValue)); // HTD overrides XML. --anyValue; - init( - name.getMethodName() + "-htd", conf, TableDescriptorBuilder - .newBuilder(TableName.valueOf(table)).setValue(CONFIG_KEY, Long.toString(anyValue)), - ColumnFamilyDescriptorBuilder.of(family)); + init(name + "-htd", conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)) + .setValue(CONFIG_KEY, Long.toString(anyValue)), ColumnFamilyDescriptorBuilder.of(family)); assertTrue(store.throttleCompaction(anyValue + 1)); assertFalse(store.throttleCompaction(anyValue)); // HCD overrides them both. 
--anyValue; - init(name.getMethodName() + "-hcd", conf, + init(name + "-hcd", conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setValue(CONFIG_KEY, Long.toString(anyValue)), ColumnFamilyDescriptorBuilder.newBuilder(family).setValue(CONFIG_KEY, Long.toString(anyValue)) @@ -1008,7 +1003,7 @@ protected void createComponents(Configuration conf, HStore store, CellComparator public void testStoreUsesSearchEngineOverride() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DummyStoreEngine.class.getName()); - init(this.name.getMethodName(), conf); + init(name, conf); assertEquals(DummyStoreEngine.lastCreatedCompactor, this.store.storeEngine.getCompactor()); } @@ -1056,7 +1051,7 @@ private void closeCompactedFile(int index) throws IOException { @Test public void testRefreshStoreFiles() throws Exception { - init(name.getMethodName()); + init(name); assertEquals(0, this.store.getStorefilesCount()); @@ -1107,7 +1102,7 @@ public void testRefreshStoreFiles() throws Exception { @Test public void testRefreshStoreFilesNotChanged() throws IOException { - init(name.getMethodName()); + init(name); assertEquals(0, this.store.getStorefilesCount()); @@ -1136,7 +1131,7 @@ public void testRefreshStoreFilesNotChanged() throws IOException { public void testScanWithCompactionAfterFlush() throws Exception { TEST_UTIL.getConfiguration().set(DEFAULT_COMPACTION_POLICY_CLASS_KEY, EverythingPolicy.class.getName()); - init(name.getMethodName()); + init(name); assertEquals(0, this.store.getStorefilesCount()); @@ -1228,7 +1223,7 @@ public void testNumberOfMemStoreScannersAfterFlush() throws IOException { private void testNumberOfMemStoreScannersAfterFlush(List inputCellsBeforeSnapshot, List inputCellsAfterSnapshot) throws IOException { - init(this.name.getMethodName() + "-" + inputCellsBeforeSnapshot.size()); + init(name + "-" + inputCellsBeforeSnapshot.size()); TreeSet quals = new TreeSet<>(Bytes.BYTES_COMPARATOR); long 
seqId = Long.MIN_VALUE; for (ExtendedCell c : inputCellsBeforeSnapshot) { @@ -1259,10 +1254,9 @@ private void testNumberOfMemStoreScannersAfterFlush(List inputCell cellCount += cells.size(); assertEquals(more ? numberOfMemScannersAfterFlush : 0, countMemStoreScanner(s)); } while (more); - assertEquals( + assertEquals(inputCellsBeforeSnapshot.size() + inputCellsAfterSnapshot.size(), cellCount, "The number of cells added before snapshot is " + inputCellsBeforeSnapshot.size() - + ", The number of cells added after snapshot is " + inputCellsAfterSnapshot.size(), - inputCellsBeforeSnapshot.size() + inputCellsAfterSnapshot.size(), cellCount); + + ", The number of cells added after snapshot is " + inputCellsAfterSnapshot.size()); // the current scanners is cleared assertEquals(0, countMemStoreScanner(s)); } @@ -1386,7 +1380,7 @@ private void testFlushBeforeCompletingScan(MyListHook hook, Filter filter, int e MemStoreSizing memStoreSizing = new NonThreadSafeMemStoreSizing(); long ts = EnvironmentEdgeManager.currentTime(); long seqId = 100; - init(name.getMethodName(), conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), + init(name, conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(1).build(), new MyStoreHook() { @Override @@ -1415,8 +1409,8 @@ public long getSmallestReadPoint(HStore store) { assertEquals(expectedSize, myList.size()); for (Cell c : myList) { byte[] actualValue = CellUtil.cloneValue(c); - assertTrue("expected:" + Bytes.toStringBinary(value1) + ", actual:" - + Bytes.toStringBinary(actualValue), Bytes.equals(actualValue, value1)); + assertTrue(Bytes.equals(actualValue, value1), "expected:" + Bytes.toStringBinary(value1) + + ", actual:" + Bytes.toStringBinary(actualValue)); } List normalList = new ArrayList<>(3); // r2 @@ -1424,8 +1418,8 @@ public long getSmallestReadPoint(HStore store) { assertEquals(3, normalList.size()); for (Cell c : normalList) { byte[] 
actualValue = CellUtil.cloneValue(c); - assertTrue("expected:" + Bytes.toStringBinary(value2) + ", actual:" - + Bytes.toStringBinary(actualValue), Bytes.equals(actualValue, value2)); + assertTrue(Bytes.equals(actualValue, value2), "expected:" + Bytes.toStringBinary(value2) + + ", actual:" + Bytes.toStringBinary(actualValue)); } } } @@ -1444,7 +1438,7 @@ public void testFlushBeforeCompletingScanWithDeleteCell() throws IOException { final long ts = EnvironmentEdgeManager.currentTime(); final long seqId = 100; - init(name.getMethodName(), conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), + init(name, conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(1).build(), new MyStoreHook() { @Override @@ -1502,7 +1496,7 @@ protected KeyValueHeap newKVHeap(List scanners, public void testCreateScannerAndSnapshotConcurrently() throws IOException, InterruptedException { Configuration conf = HBaseConfiguration.create(); conf.set(HStore.MEMSTORE_CLASS_NAME, MyCompactingMemStore.class.getName()); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); byte[] value = Bytes.toBytes("value"); MemStoreSizing memStoreSizing = new NonThreadSafeMemStoreSizing(); @@ -1539,8 +1533,8 @@ public void testCreateScannerAndSnapshotConcurrently() throws IOException, Inter assertEquals(3, results.size()); for (Cell c : results) { byte[] actualValue = CellUtil.cloneValue(c); - assertTrue("expected:" + Bytes.toStringBinary(value) + ", actual:" - + Bytes.toStringBinary(actualValue), Bytes.equals(actualValue, value)); + assertTrue(Bytes.equals(actualValue, value), "expected:" + Bytes.toStringBinary(value) + + ", actual:" + Bytes.toStringBinary(actualValue)); } } finally { scanner.close(); @@ -1556,7 +1550,7 @@ public void 
testCreateScannerAndSnapshotConcurrently() throws IOException, Inter public void testScanWithDoubleFlush() throws IOException { Configuration conf = HBaseConfiguration.create(); // Initialize region - MyStore myStore = initMyStore(name.getMethodName(), conf, new MyStoreHook() { + MyStore myStore = initMyStore(name, conf, new MyStoreHook() { @Override public void getScanners(MyStore store) throws IOException { final long tmpId = id++; @@ -1614,8 +1608,8 @@ public void getScanners(MyStore store) throws IOException { assertEquals(3, results.size()); for (Cell c : results) { byte[] actualValue = CellUtil.cloneValue(c); - assertTrue("expected:" + Bytes.toStringBinary(currentValue) + ", actual:" - + Bytes.toStringBinary(actualValue), Bytes.equals(actualValue, currentValue)); + assertTrue(Bytes.equals(actualValue, currentValue), "expected:" + + Bytes.toStringBinary(currentValue) + ", actual:" + Bytes.toStringBinary(actualValue)); } } } @@ -1636,7 +1630,7 @@ public void testStoreScannerUpdateReadersWhenFlushAndCompactConcurrently() throw final CyclicBarrier cyclicBarrier = new CyclicBarrier(2); final AtomicBoolean shouldWaitRef = new AtomicBoolean(false); // Initialize region - final MyStore myStore = initMyStore(name.getMethodName(), conf, new MyStoreHook() { + final MyStore myStore = initMyStore(name, conf, new MyStoreHook() { @Override public void getScanners(MyStore store) throws IOException { try { @@ -1769,8 +1763,8 @@ public void testReclaimChunkWhenScaning() throws IOException { assertEquals(3, results.size()); for (Cell c : results) { byte[] actualValue = CellUtil.cloneValue(c); - assertTrue("expected:" + Bytes.toStringBinary(value) + ", actual:" - + Bytes.toStringBinary(actualValue), Bytes.equals(actualValue, value)); + assertTrue(Bytes.equals(actualValue, value), "expected:" + Bytes.toStringBinary(value) + + ", actual:" + Bytes.toStringBinary(actualValue)); } } } @@ -1791,7 +1785,7 @@ public void testRunDoubleMemStoreCompactors() throws IOException, 
InterruptedExc conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushSize)); // Set the lower threshold to invoke the "MERGE" policy conf.set(MemStoreCompactionStrategy.COMPACTING_MEMSTORE_THRESHOLD_KEY, String.valueOf(0)); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); byte[] value = Bytes.toBytes("thisisavarylargevalue"); MemStoreSizing memStoreSizing = new NonThreadSafeMemStoreSizing(); @@ -1835,8 +1829,8 @@ public void testAge() throws IOException { EnvironmentEdgeManager.injectEdge(edge); Configuration conf = TEST_UTIL.getConfiguration(); ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.of(family); - initHRegion(name.getMethodName(), conf, - TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), hcd, null, false); + initHRegion(name, conf, TableDescriptorBuilder.newBuilder(TableName.valueOf(table)), hcd, null, + false); HStore store = new HStore(region, hcd, conf, false) { @Override @@ -1915,7 +1909,7 @@ public void testSwitchingPreadtoStreamParallelyWithCompactionDischarger() throws conf.set("hbase.hstore.engine.class", DummyStoreEngine.class.getName()); conf.setLong(StoreScanner.STORESCANNER_PREAD_MAX_BYTES, 0); // Set the lower threshold to invoke the "MERGE" policy - MyStore store = initMyStore(name.getMethodName(), conf, new MyStoreHook() { + MyStore store = initMyStore(name, conf, new MyStoreHook() { }); MemStoreSizing memStoreSizing = new NonThreadSafeMemStoreSizing(); long ts = EnvironmentEdgeManager.currentTime(); @@ -1981,7 +1975,7 @@ public void testMaxPreadBytesConfiguredToBeLessThanZero() throws Exception { conf.set("hbase.hstore.engine.class", DummyStoreEngine.class.getName()); // Set 'hbase.storescanner.pread.max.bytes' < 0, so that StoreScanner will be a STREAM type. 
conf.setLong(StoreScanner.STORESCANNER_PREAD_MAX_BYTES, -1); - MyStore store = initMyStore(name.getMethodName(), conf, new MyStoreHook() { + MyStore store = initMyStore(name, conf, new MyStoreHook() { }); Scan scan = new Scan(); scan.addFamily(family); @@ -1996,7 +1990,7 @@ public void testMaxPreadBytesConfiguredToBeLessThanZero() throws Exception { public void testInMemoryCompactionTypeWithLowerCase() throws IOException, InterruptedException { Configuration conf = HBaseConfiguration.create(); conf.set("hbase.systemtables.compacting.memstore.type", "eager"); - init(name.getMethodName(), conf, + init(name, conf, TableDescriptorBuilder.newBuilder( TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME, "meta".getBytes())), ColumnFamilyDescriptorBuilder.newBuilder(family) @@ -2007,8 +2001,8 @@ public void testInMemoryCompactionTypeWithLowerCase() throws IOException, Interr @Test public void testSpaceQuotaChangeAfterReplacement() throws IOException { - final TableName tn = TableName.valueOf(name.getMethodName()); - init(name.getMethodName()); + final TableName tn = TableName.valueOf(name); + init(name); RegionSizeStoreImpl sizeStore = new RegionSizeStoreImpl(); @@ -2048,7 +2042,7 @@ public void testSpaceQuotaChangeAfterReplacement() throws IOException { @Test public void testHFileContextSetWithCFAndTable() throws Exception { - init(this.name.getMethodName()); + init(name); StoreFileWriter writer = store.getStoreEngine() .createWriter(CreateStoreFileWriterParams.create().maxKeyCount(10000L) .compression(Compression.Algorithm.NONE).isCompaction(true).includeMVCCReadpoint(true) @@ -2080,7 +2074,7 @@ public void testCompactingMemStoreNoCellButDataSizeExceedsInmemoryFlushSize() conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.005); conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushByteSize * 200)); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, 
ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); MyCompactingMemStore2 myCompactingMemStore = ((MyCompactingMemStore2) store.memstore); @@ -2129,12 +2123,13 @@ public void testCompactingMemStoreNoCellButDataSizeExceedsInmemoryFlushSize() // This test is for HBASE-26210, HBase Write be stuck when there is cell which size exceeds // InmemoryFlushSize - @Test(timeout = 60000) + @Test + @Timeout(value = 60000, unit = TimeUnit.MILLISECONDS) public void testCompactingMemStoreCellExceedInmemoryFlushSize() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set(HStore.MEMSTORE_CLASS_NAME, MyCompactingMemStore6.class.getName()); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); MyCompactingMemStore6 myCompactingMemStore = ((MyCompactingMemStore6) store.memstore); @@ -2186,7 +2181,7 @@ public void testForceCloneOfBigCellForCellChunkImmutableSegment() throws Excepti conf.setBoolean(WALFactory.WAL_ENABLED, false); // Use {@link MemoryCompactionPolicy#EAGER} for always compacting. 
- init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.EAGER).build()); MyCompactingMemStore6 myCompactingMemStore = ((MyCompactingMemStore6) store.memstore); @@ -2292,7 +2287,7 @@ private void doWriteTestLargeCellAndSmallCellConcurrently(IntBinaryOperator getF conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.005); conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushByteSize * 200)); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); MyCompactingMemStore3 myCompactingMemStore = ((MyCompactingMemStore3) store.memstore); @@ -2421,7 +2416,7 @@ public void testFlattenAndSnapshotCompactingMemStoreConcurrently() throws Except conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.005); conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushByteSize * 200)); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); MyCompactingMemStore4 myCompactingMemStore = ((MyCompactingMemStore4) store.memstore); @@ -2524,7 +2519,7 @@ public void testFlattenSnapshotWriteCompactingMemeStoreConcurrently() throws Exc conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.005); conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushByteSize * 200)); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); final MyCompactingMemStore5 myCompactingMemStore = 
((MyCompactingMemStore5) store.memstore); @@ -2623,7 +2618,7 @@ public void testClearSnapshotGetScannerConcurrently() throws Exception { conf.set(HStore.MEMSTORE_CLASS_NAME, MyDefaultMemStore.class.getName()); conf.setBoolean(WALFactory.WAL_ENABLED, false); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); MyDefaultMemStore myDefaultMemStore = (MyDefaultMemStore) (store.memstore); myDefaultMemStore.store = store; @@ -2712,7 +2707,7 @@ public void testOnConfigurationChange() throws IOException { .setConfiguration(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, String.valueOf(STORE_MAX_FILES_TO_COMPACT)) .build(); - init(this.name.getMethodName(), conf, hcd); + init(name, conf, hcd); // After updating common configuration, the conf in HStore itself must not be changed. conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, @@ -2741,12 +2736,12 @@ public void testExtendsDefaultMemStore() throws Exception { Configuration conf = HBaseConfiguration.create(); conf.setBoolean(WALFactory.WAL_ENABLED, false); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); assertTrue(this.store.memstore.getClass() == DefaultMemStore.class); tearDown(); conf.set(HStore.MEMSTORE_CLASS_NAME, CustomDefaultMemStore.class.getName()); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); assertTrue(this.store.memstore.getClass() == CustomDefaultMemStore.class); } @@ -2781,7 +2776,7 @@ public void testMemoryLeakWhenFlushMemStoreRetrying() throws Exception { conf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY, MyDefaultStoreFlusher.class.getName()); - init(name.getMethodName(), conf, 
ColumnFamilyDescriptorBuilder.newBuilder(family).build()); + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family).build()); MyDefaultMemStore1 myDefaultMemStore = (MyDefaultMemStore1) (store.memstore); assertTrue((store.storeEngine.getStoreFlusher()) instanceof MyDefaultStoreFlusher); @@ -2896,7 +2891,7 @@ public void testImmutableMemStoreLABRefCnt() throws Exception { conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(flushByteSize * 200)); conf.setBoolean(WALFactory.WAL_ENABLED, false); - init(name.getMethodName(), conf, ColumnFamilyDescriptorBuilder.newBuilder(family) + init(name, conf, ColumnFamilyDescriptorBuilder.newBuilder(family) .setInMemoryCompaction(MemoryCompactionPolicy.BASIC).build()); final CompactingMemStore myCompactingMemStore = ((CompactingMemStore) store.memstore); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index 1580d1e690e6..b320312d17f1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.io.hfile.BlockCompressedSizePredicator.BLOCK_COMPRESSED_SIZE_PREDICATOR; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -91,13 +90,11 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -109,13 +106,10 @@ /** * Test HStoreFile */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestHStoreFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHStoreFile.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration()); @@ -125,22 +119,21 @@ public class TestHStoreFile { private static String TEST_FAMILY = "cf"; private static final char FIRST_CHAR = 'a'; private static final char 
LAST_CHAR = 'z'; - - @Rule - public TestName name = new TestName(); + private String name; private Configuration conf; private Path testDir; private FileSystem fs; - @Before - public void setUp() throws IOException { + @BeforeEach + public void setUp(TestInfo testInfo) throws IOException { + this.name = testInfo.getTestMethod().get().getName(); conf = TEST_UTIL.getConfiguration(); - testDir = TEST_UTIL.getDataTestDir(name.getMethodName()); + testDir = TEST_UTIL.getDataTestDir(name); fs = testDir.getFileSystem(conf); } - @AfterClass + @AfterAll public static void tearDownAfterClass() { TEST_UTIL.cleanupTestDir(); } @@ -149,6 +142,7 @@ public static void tearDownAfterClass() { * Write a file and then assert that we can read from top and bottom halves using two * HalfMapFiles, as well as one HalfMapFile and one HFileLink file. */ + @Test public void testBasicHalfAndHFileLinkMapFile() throws Exception { final RegionInfo hri = @@ -175,8 +169,7 @@ public void testBasicHalfAndHFileLinkMapFile() throws Exception { } private void writeStoreFile(final StoreFileWriter writer) throws IOException { - writeStoreFile(writer, Bytes.toBytes(name.getMethodName()), - Bytes.toBytes(name.getMethodName())); + writeStoreFile(writer, Bytes.toBytes(name), Bytes.toBytes(name)); } // pick an split point (roughly halfway) @@ -300,10 +293,10 @@ public void testStoreFileReference() throws Exception { new StoreFileScanner(r, mock(HFileScanner.class), false, false, 0, 0, false, false); // Verify after instantiating scanner refCount is increased - assertTrue("Verify file is being referenced", file.isReferencedInReads()); + assertTrue(file.isReferencedInReads(), "Verify file is being referenced"); scanner.close(); // Verify after closing scanner refCount is decreased - assertFalse("Verify file is not being referenced", file.isReferencedInReads()); + assertFalse(file.isReferencedInReads(), "Verify file is not being referenced"); } @Test @@ -699,10 +692,10 @@ private void 
bloomWriteRead(StoreFileWriter writer, FileSystem fs) throws Except } reader.close(true); // evict because we are about to delete the file fs.delete(f, true); - assertEquals("False negatives: " + falseNeg, 0, falseNeg); + assertEquals(0, falseNeg, "False negatives: " + falseNeg); int maxFalsePos = (int) (2 * 2000 * err); - assertTrue("Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos); + assertTrue(falsePos <= maxFalsePos, "Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " + maxFalsePos + ")"); private static final int BLOCKSIZE_SMALL = 8192; @@ -781,10 +774,10 @@ public void testDeleteFamilyBloomFilter() throws Exception { assertEquals(1000, reader.getDeleteFamilyCnt()); reader.close(true); // evict because we are about to delete the file fs.delete(f, true); - assertEquals("False negatives: " + falseNeg, 0, falseNeg); + assertEquals(0, falseNeg, "False negatives: " + falseNeg); int maxFalsePos = (int) (2 * 2000 * err); - assertTrue("Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " + maxFalsePos, falsePos <= maxFalsePos); + assertTrue(falsePos <= maxFalsePos, "Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " + maxFalsePos + ")"); } /** @@ -818,7 +811,7 @@ public void testReseek() throws Exception { StoreFileScanner s = getStoreFileScanner(reader, false, false); s.reseek(k); - assertNotNull("Intial reseek should position at the beginning of the file", s.peek()); + assertNotNull(s.peek(), "Initial reseek should position at the beginning of the file"); } @Test @@ -1325,8 +1318,7 @@ private void testDataBlockSizeWithCompressionRatePredicator(int expectedBlockCou // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) .withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build(); - writeLargeStoreFile(writer, Bytes.toBytes(name.getMethodName()), - Bytes.toBytes(name.getMethodName()), 200); + writeLargeStoreFile(writer, Bytes.toBytes(name), Bytes.toBytes(name), 200); writer.close(); StoreFileInfo storeFileInfo = StoreFileInfo.createStoreFileInfoForHFile(conf, fs, writer.getPath(), true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java index 88d094455c88..3c86d1069264 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver; +import static org.junit.jupiter.api.Assertions.assertNotNull; + import java.io.IOException; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -31,20 +32,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hdfs.DFSClient; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class 
TestHdfsSnapshotHRegion { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHdfsSnapshotHRegion.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final String SNAPSHOT_NAME = "foo_snapshot"; private Table table; @@ -53,7 +49,7 @@ public class TestHdfsSnapshotHRegion { private DFSClient client; private String baseDir; - @Before + @BeforeEach public void setUp() throws Exception { Configuration c = TEST_UTIL.getConfiguration(); c.setBoolean("dfs.support.append", true); @@ -69,7 +65,7 @@ public void setUp() throws Exception { client.allowSnapshot(baseDir); } - @After + @AfterEach public void tearDown() throws Exception { client.deleteSnapshot(baseDir, SNAPSHOT_NAME); TEST_UTIL.shutdownMiniCluster(); @@ -82,7 +78,7 @@ public void testOpeningReadOnlyRegionBasic() throws Exception { .getAllRegionLocations().stream().findFirst().get().getRegion(); Path tableDir = CommonFSUtils.getTableDir(new Path(snapshotDir), TABLE_NAME); HRegion snapshottedRegion = openSnapshotRegion(firstRegion, tableDir); - Assert.assertNotNull(snapshottedRegion); + assertNotNull(snapshottedRegion); snapshottedRegion.close(); } @@ -105,7 +101,7 @@ public void testSnapshottingWithTmpSplitsAndMergeDirectoriesPresent() throws Exc // everything should still open just fine HRegion snapshottedRegion = openSnapshotRegion(firstRegion, CommonFSUtils.getTableDir(new Path(snapshotDir), TABLE_NAME)); - Assert.assertNotNull(snapshottedRegion); // no errors and the region should open + assertNotNull(snapshottedRegion); // no errors and the region should open snapshottedRegion.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java index 9b6a5d80c9ab..3aafb7360eef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java 
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.CoordinatedStateManager; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -52,17 +51,13 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestHeapMemoryManager { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHeapMemoryManager.class); - private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); private long maxHeapSize = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestInputStreamBlockDistribution.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestInputStreamBlockDistribution.java index 9adac06b88d3..65c4c1780590 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestInputStreamBlockDistribution.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestInputStreamBlockDistribution.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -32,7 +32,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.io.FileLink; @@ -40,24 +39,20 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestInputStreamBlockDistribution { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE 
= - HBaseClassTestRule.forClass(TestInputStreamBlockDistribution.class); - private Configuration conf; private FileSystem fs; private Path testPath; - @Before + @BeforeEach public void setUp() throws Exception { HBaseTestingUtil testUtil = new HBaseTestingUtil(); conf = testUtil.getConfiguration(); @@ -73,7 +68,7 @@ public void setUp() throws Exception { writeSomeData(fs, testPath, 256 << 20, (byte) 2); } - @After + @AfterEach public void tearDown() throws Exception { fs.delete(testPath, false); fs.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java index 648b06189e2c..81197a3245f8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java @@ -21,7 +21,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; @@ -38,30 +37,26 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test failure in ScanDeleteTracker.isDeleted when ROWCOL bloom filter is used 
during a scan with a * filter. */ -@Category({ RegionServerTests.class, FilterTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestIsDeleteFailure { - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestIsDeleteFailure.class); - @Rule - public TestName name = new TestName(); + private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); + private String name; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100); TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250); @@ -70,15 +65,20 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } + @Test public void testIsDeleteFailure() throws Exception { final TableDescriptor table = - TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build(); + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build(); final byte[] family = Bytes.toBytes("0"); final byte[] c1 = Bytes.toBytes("C01"); final byte[] c2 = Bytes.toBytes("C02"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index be961bb396d2..710c0cf6c7b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -17,13 +17,14 @@ */ package 
org.apache.hadoop.hbase.regionserver; +import static org.junit.jupiter.api.Assertions.assertThrows; + import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.StartTestingClusterOption; import org.apache.hadoop.hbase.TableName; @@ -43,13 +44,12 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,13 +64,10 @@ * Test performance improvement of joined scanners optimization: * https://issues.apache.org/jira/browse/HBASE-5416 */ -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestJoinedScanners { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestJoinedScanners.class); - private static final Logger LOG = LoggerFactory.getLogger(TestJoinedScanners.class); private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); @@ -84,11 +81,9 @@ public class TestJoinedScanners { private static DataBlockEncoding blockEncoding = DataBlockEncoding.FAST_DIFF; private static int selectionRatio = 30; private static int 
valueWidth = 128 * 1024; + private String name; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { final int DEFAULT_BLOCK_SIZE = 1024 * 1024; TEST_UTIL.getConfiguration().setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE); @@ -102,16 +97,21 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(option); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } + @Test public void testJoinedScanners() throws Exception { byte[][] families = { cf_essential, cf_joined }; - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(name); TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (byte[] family : families) { ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(family) @@ -231,10 +231,10 @@ public static void main(final String[] args) throws Exception { test.testJoinedScanners(); } - @Test(expected = DoNotRetryIOException.class) + @Test public void testWithReverseScan() throws Exception { try (Connection con = TEST_UTIL.getConnection(); Admin admin = con.getAdmin()) { - TableName tableName = TableName.valueOf(name.getMethodName()); + TableName tableName = TableName.valueOf(name); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1")) @@ -254,7 +254,7 @@ public void testWithReverseScan() throws Exception { try (ResultScanner scanner = table.getScanner(scan)) { // DoNotRetryIOException should occur - scanner.next(); + assertThrows(DoNotRetryIOException.class, scanner::next); } } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java index 85503a56e095..9d90b8b15bc0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.HBaseTestingUtil.COLUMNS; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeepDeletedCells; @@ -48,21 +47,16 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestKeepDeletes { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestKeepDeletes.class); - HBaseTestingUtil hbu = new HBaseTestingUtil(); private final byte[] T0 = Bytes.toBytes("0"); private final byte[] T1 = Bytes.toBytes("1"); @@ -74,12 +68,11 @@ public class TestKeepDeletes { private final byte[] c0 = COLUMNS[0]; private final byte[] c1 = COLUMNS[1]; + private String name; - @Rule - public TestName name = new TestName(); - - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); /* * HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on implicit * RS timing. Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete @@ -91,7 +84,7 @@ public void setUp() throws Exception { EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); } - @After + @AfterEach public void tearDown() throws Exception { EnvironmentEdgeManager.reset(); } @@ -100,10 +93,11 @@ public void tearDown() throws Exception { * Make sure that deleted rows are retained. Family delete markers are deleted. 
Column Delete * markers are versioned Time range scan of deleted rows are possible */ + @Test public void testBasicScenario() throws Exception { // keep 3 versions, rows do not expire - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -198,7 +192,7 @@ public void testBasicScenario() throws Exception { @Test public void testRawScanWithoutKeepingDeletes() throws Exception { // KEEP_DELETED_CELLS is NOT enabled - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.FALSE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -243,7 +237,7 @@ public void testRawScanWithoutKeepingDeletes() throws Exception { @Test public void testWithoutKeepingDeletes() throws Exception { // KEEP_DELETED_CELLS is NOT enabled - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.FALSE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -296,7 +290,7 @@ public void testWithoutKeepingDeletes() throws Exception { */ @Test public void testRawScanWithColumns() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.TRUE); Region region = hbu.createLocalHRegion(htd, null, null); @@ -320,7 +314,7 @@ public void testRawScanWithColumns() throws Exception { */ @Test public void testRawScan() throws Exception { - TableDescriptor htd = 
hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.TRUE); Region region = hbu.createLocalHRegion(htd, null, null); @@ -409,7 +403,7 @@ public void testRawScan() throws Exception { */ @Test public void testDeleteMarkerExpirationEmptyStore() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 1, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 1, HConstants.FOREVER, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -452,7 +446,7 @@ public void testDeleteMarkerExpirationEmptyStore() throws Exception { */ @Test public void testDeleteMarkerExpiration() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 1, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 1, HConstants.FOREVER, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -515,7 +509,7 @@ public void testDeleteMarkerExpiration() throws Exception { */ @Test public void testWithOldRow() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 1, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 1, HConstants.FOREVER, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -593,7 +587,7 @@ public void testWithOldRow() throws Exception { */ @Test public void testRanges() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 3, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 3, HConstants.FOREVER, KeepDeletedCells.TRUE); Region region = hbu.createLocalHRegion(htd, null, null); @@ -674,7 +668,7 @@ public void testRanges() throws Exception { 
*/ @Test public void testDeleteMarkerVersioning() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 1, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 1, HConstants.FOREVER, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -767,7 +761,7 @@ public void testDeleteMarkerVersioning() throws Exception { */ @Test public void testWithMixedCFs() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 0, 1, + TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name), 0, 1, HConstants.FOREVER, KeepDeletedCells.TRUE); Region region = hbu.createLocalHRegion(htd, null, null); @@ -818,8 +812,8 @@ public void testWithMixedCFs() throws Exception { */ @Test public void testWithMinVersions() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 3, - 1000, 1, KeepDeletedCells.TRUE); + TableDescriptor htd = + hbu.createTableDescriptor(TableName.valueOf(name), 3, 1000, 1, KeepDeletedCells.TRUE); HRegion region = hbu.createLocalHRegion(htd, null, null); long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past @@ -896,8 +890,8 @@ public void testWithMinVersions() throws Exception { */ @Test public void testWithTTL() throws Exception { - TableDescriptor htd = hbu.createTableDescriptor(TableName.valueOf(name.getMethodName()), 1, - 1000, 1, KeepDeletedCells.TTL); + TableDescriptor htd = + hbu.createTableDescriptor(TableName.valueOf(name), 1, 1000, 1, KeepDeletedCells.TTL); HRegion region = hbu.createLocalHRegion(htd, null, null); long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java index 2ee0645f1573..2a6f2fc4e1c4 100644 
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -31,23 +31,18 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CollectionBackedScanner; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestKeyValueHeap { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestKeyValueHeap.class); - private byte[] row1 = Bytes.toBytes("row1"); private byte[] fam1 = Bytes.toBytes("fam1"); private byte[] col1 = Bytes.toBytes("col1"); @@ -130,8 +125,8 @@ public void testSeek() throws IOException { List actual = Arrays.asList(kvh.peek()); - assertEquals("Expected = " + Arrays.toString(expected.toArray()) + "\n Actual = " - + Arrays.toString(actual.toArray()), expected, actual); + assertEquals(expected, actual, "Expected = " + 
Arrays.toString(expected.toArray()) + + "\n Actual = " + Arrays.toString(actual.toArray())); } } @@ -242,8 +237,8 @@ public void testGetFilesRead() throws IOException { // Verify that before closing, files are not returned Set filesReadBeforeClose = keyValueHeap.getFilesRead(); - assertTrue("Should return empty set before closing heap", filesReadBeforeClose.isEmpty()); - assertEquals("Should have 0 files before closing", 0, filesReadBeforeClose.size()); + assertTrue(filesReadBeforeClose.isEmpty(), "Should return empty set before closing heap"); + assertEquals(0, filesReadBeforeClose.size(), "Should have 0 files before closing"); // Now close the heap keyValueHeap.close(); @@ -251,16 +246,16 @@ public void testGetFilesRead() throws IOException { // After closing, should return all files from file-based scanners only // Non-file-based scanners (like memstore) should not contribute files Set filesReadAfterClose = keyValueHeap.getFilesRead(); - assertEquals("Should return set with 3 file paths after closing (excluding non-file scanner)", - 3, filesReadAfterClose.size()); - assertTrue("Should contain file1", filesReadAfterClose.contains(file1)); - assertTrue("Should contain file2", filesReadAfterClose.contains(file2)); - assertTrue("Should contain file3", filesReadAfterClose.contains(file3)); + assertEquals(3, filesReadAfterClose.size(), + "Should return set with 3 file paths after closing (excluding non-file scanner)"); + assertTrue(filesReadAfterClose.contains(file1), "Should contain file1"); + assertTrue(filesReadAfterClose.contains(file2), "Should contain file2"); + assertTrue(filesReadAfterClose.contains(file3), "Should contain file3"); // Verify that non-file-based scanner doesn't contribute any files // (memStoreScanner.getFilesRead() should return empty set) Set memStoreFiles = memStoreScanner.getFilesRead(); - assertTrue("Non-file-based scanner should return empty set", memStoreFiles.isEmpty()); + assertTrue(memStoreFiles.isEmpty(), "Non-file-based scanner 
should return empty set"); } private static class TestScanner extends CollectionBackedScanner { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java index b3562b08d52c..1552465fa988 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java @@ -17,30 +17,25 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestKeyValueScanFixture { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestKeyValueScanFixture.class); - @Test public void testKeyValueScanFixture() throws IOException { KeyValue kvs[] = new KeyValue[] { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestLogRoller.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestLogRoller.java index 70b0391f02b6..d498745f9cc3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestLogRoller.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestLogRoller.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.HashMap; import java.util.Iterator; @@ -28,27 +28,22 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.wal.FSHLog; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestLogRoller { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLogRoller.class); - private static final 
HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final int LOG_ROLL_PERIOD = 20 * 1000; @@ -60,7 +55,7 @@ public class TestLogRoller { private static Path ROOT_DIR; private static FileSystem FS; - @Before + @BeforeEach public void setUp() throws Exception { CONF = TEST_UTIL.getConfiguration(); CONF.setInt("hbase.regionserver.logroll.period", LOG_ROLL_PERIOD); @@ -74,7 +69,7 @@ public void setUp() throws Exception { ROLLER.start(); } - @After + @AfterEach public void tearDown() throws Exception { ROLLER.close(); FS.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index 9ec1ed809316..7cb842e67b53 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -21,9 +21,9 @@ import static org.apache.hadoop.hbase.HBaseTestingUtil.START_KEY_BYTES; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1; import static org.apache.hadoop.hbase.regionserver.Store.PRIORITY_USER; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -32,10 +32,11 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import 
org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTestConst; @@ -60,40 +61,33 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test major compactions */ -@Category({ RegionServerTests.class, LargeTests.class }) -@RunWith(Parameterized.class) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: compType={0}") public class TestMajorCompaction { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMajorCompaction.class); - - @Parameterized.Parameters - public static Object[] data() { - return new Object[] { "NONE", "BASIC", "EAGER" }; + public static Stream parameters() { + return Stream.of("NONE", "BASIC", "EAGER").map(Arguments::of); } - @Rule - public TestName name; private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName()); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); protected Configuration conf = UTIL.getConfiguration(); + private String name; + private HRegion r = null; private TableDescriptor htd = null; private static final byte[] COLUMN_FAMILY = fam1; @@ -106,7 +100,6 @@ public static Object[] data() { /** constructor */ public TestMajorCompaction(String compType) { super(); - name = new 
TestName(); // Set cache flush size to 1MB conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024); conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100); @@ -121,16 +114,17 @@ public TestMajorCompaction(String compType) { (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2); } - @Before - public void setUp() throws Exception { - this.htd = UTIL.createTableDescriptor( - TableName.valueOf(name.getMethodName().replace('[', 'i').replace(']', 'i')), - ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, - ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED); + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); + this.htd = + UTIL.createTableDescriptor(TableName.valueOf(name.replace('[', 'i').replace(']', 'i')), + ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, + ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED); this.r = UTIL.createLocalHRegion(htd, null, null); } - @After + @AfterEach public void tearDown() throws Exception { WAL wal = ((HRegion) r).getWAL(); ((HRegion) r).close(); @@ -143,7 +137,7 @@ public void tearDown() throws Exception { * basically works. * @throws IOException exception encountered */ - @Test + @TestTemplate public void testMajorCompactingToNoOutput() throws IOException { testMajorCompactingWithDeletes(KeepDeletedCells.FALSE); } @@ -152,7 +146,7 @@ public void testMajorCompactingToNoOutput() throws IOException { * Test that on a major compaction,Deleted cells are retained if keep deleted cells is set to true * @throws IOException exception encountered */ - @Test + @TestTemplate public void testMajorCompactingWithKeepDeletedCells() throws IOException { testMajorCompactingWithDeletes(KeepDeletedCells.TRUE); } @@ -160,17 +154,17 @@ public void testMajorCompactingWithKeepDeletedCells() throws IOException { /** * Run compaction and flushing memstore Assert deletes get cleaned up. 
*/ - @Test + @TestTemplate public void testMajorCompaction() throws Exception { majorCompaction(); } - @Test + @TestTemplate public void testDataBlockEncodingInCacheOnly() throws Exception { majorCompactionWithDataBlockEncoding(true); } - @Test + @TestTemplate public void testDataBlockEncodingEverywhere() throws Exception { majorCompactionWithDataBlockEncoding(false); } @@ -220,8 +214,8 @@ private void majorCompaction() throws Exception { result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); LOG.debug( "Row " + Bytes.toStringBinary(secondRowBytes) + " after " + "initial compaction: " + result); - assertEquals("Invalid number of versions of row " + Bytes.toStringBinary(secondRowBytes) + ".", - compactionThreshold, result.size()); + assertEquals(compactionThreshold, result.size(), + "Invalid number of versions of row " + Bytes.toStringBinary(secondRowBytes) + "."); // Now add deletes to memstore and then flush it. // That will put us over @@ -236,26 +230,26 @@ private void majorCompaction() throws Exception { // Assert deleted. result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); - assertTrue("Second row should have been deleted", result.isEmpty()); + assertTrue(result.isEmpty(), "Second row should have been deleted"); r.flush(true); result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); - assertTrue("Second row should have been deleted", result.isEmpty()); + assertTrue(result.isEmpty(), "Second row should have been deleted"); // Add a bit of data and flush. Start adding at 'bbb'. createSmallerStoreFile(this.r); r.flush(true); // Assert that the second row is still deleted. result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); - assertTrue("Second row should still be deleted", result.isEmpty()); + assertTrue(result.isEmpty(), "Second row should still be deleted"); // Force major compaction. 
r.compact(true); assertEquals(1, r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size()); result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); - assertTrue("Second row should still be deleted", result.isEmpty()); + assertTrue(result.isEmpty(), "Second row should still be deleted"); // Make sure the store files do have some 'aaa' keys in them -- exactly 3. // Also, that compacted store files do not have any secondRowBytes because @@ -274,10 +268,10 @@ private void majorCompaction() throws Exception { r.compact(true); int count = count(); - assertEquals("Should not see anything after TTL has expired", 0, count); + assertEquals(0, count, "Should not see anything after TTL has expired"); } - @Test + @TestTemplate public void testTimeBasedMajorCompaction() throws Exception { // create 2 storefiles and force a major compaction to reset the time int delay = 10 * 1000; // 10 sec @@ -383,7 +377,7 @@ private void createSmallerStoreFile(final HRegion region) throws IOException { /** * Test for HBASE-5920 - Test user requested major compactions always occurring */ - @Test + @TestTemplate public void testNonUserMajorCompactionRequest() throws Exception { HStore store = r.getStore(COLUMN_FAMILY); createStoreFile(r); @@ -393,16 +387,15 @@ public void testNonUserMajorCompactionRequest() throws Exception { store.triggerMajorCompaction(); CompactionRequestImpl request = store.requestCompaction().get().getRequest(); - assertNotNull("Expected to receive a compaction request", request); - assertEquals( - "System-requested major compaction should not occur if there are too many store files", false, - request.isMajor()); + assertNotNull(request, "Expected to receive a compaction request"); + assertEquals(false, request.isMajor(), + "System-requested major compaction should not occur if there are too many store files"); } /** * Test for HBASE-5920 */ - @Test + @TestTemplate public void testUserMajorCompactionRequest() throws IOException { HStore store = 
r.getStore(COLUMN_FAMILY); createStoreFile(r); @@ -412,10 +405,9 @@ public void testUserMajorCompactionRequest() throws IOException { store.triggerMajorCompaction(); CompactionRequestImpl request = store .requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get().getRequest(); - assertNotNull("Expected to receive a compaction request", request); - assertEquals( - "User-requested major compaction should always occur, even if there are too many store files", - true, request.isMajor()); + assertNotNull(request, "Expected to receive a compaction request"); + assertEquals(true, request.isMajor(), + "User-requested major compaction should always occur, even if there are too many store files"); } /** @@ -423,7 +415,7 @@ public void testUserMajorCompactionRequest() throws IOException { * product. Make sure scanner over region returns right answer in this case - and that it just * basically works. */ - @Test + @TestTemplate public void testMajorCompactingToNoOutputWithReverseScan() throws IOException { createStoreFile(r); for (int i = 0; i < compactionThreshold; i++) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultsPerColumnFamily.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultsPerColumnFamily.java index f90374295a0d..569ec7f2815b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultsPerColumnFamily.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultsPerColumnFamily.java @@ -17,9 +17,8 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; @@ -33,21 +32,16 @@ import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category(MediumTests.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(MediumTests.TAG) public class TestMaxResultsPerColumnFamily { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMaxResultsPerColumnFamily.class); - private static final byte[][] FAMILIES = { Bytes.toBytes("1"), Bytes.toBytes("2") }; private static final byte[][] VALUES = @@ -55,22 +49,26 @@ public class TestMaxResultsPerColumnFamily { private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Rule - public TestName name = new TestName(); + private String name; + + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } @Test public void testSetMaxResultsPerColumnFamilySimple() throws Exception { - TableName tableName = TableName.valueOf(name.getMethodName()); + TableName tableName = TableName.valueOf(name); Admin admin = UTIL.getAdmin(); ColumnFamilyDescriptorBuilder cfBuilder0 = ColumnFamilyDescriptorBuilder.newBuilder(FAMILIES[0]); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java index 17e1e0557d01..e35694e396c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -32,7 +32,6 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ByteBufferKeyValue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.exceptions.UnexpectedStateException; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; @@ -40,28 +39,24 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test the {@link org.apache.hadoop.hbase.regionserver.ChunkCreator.MemStoreChunkPool} class */ -@Category({ RegionServerTests.class, SmallTests.class 
}) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMemStoreChunkPool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMemStoreChunkPool.class); - private final static Configuration conf = new Configuration(); private static ChunkCreator chunkCreator; private static boolean chunkPoolDisabledBeforeTest; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { conf.setBoolean(MemStoreLAB.USEMSLAB_KEY, true); conf.setFloat(MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY, 0.2f); @@ -76,12 +71,12 @@ public static void setUpBeforeClass() throws Exception { assertNotNull(chunkCreator); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { ChunkCreator.chunkPoolDisabled = chunkPoolDisabledBeforeTest; } - @After + @AfterEach public void tearDown() throws Exception { chunkCreator.clearChunksInPool(); } @@ -106,8 +101,8 @@ public void testReusingChunks() { lastBuffer = newKv.getBuffer(); } assertEquals(expectedOff, newKv.getOffset()); - assertTrue("Allocation overruns buffer", - newKv.getOffset() + size <= newKv.getBuffer().capacity()); + assertTrue(newKv.getOffset() + size <= newKv.getBuffer().capacity(), + "Allocation overruns buffer"); expectedOff += size; } // chunks will be put back to pool after close diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreFlusher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreFlusher.java index 158dd91d9a06..05cfda4ab702 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreFlusher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreFlusher.java @@ -17,41 +17,35 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Threads; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ RegionServerTests.class, SmallTests.class }) -public class TestMemStoreFlusher { +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMemStoreFlusher.class); +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) +public class TestMemStoreFlusher { - @Rule - public TestName name = new TestName(); + private String name; public MemStoreFlusher msf; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); Configuration conf = new Configuration(); conf.set("hbase.hstore.flusher.count", "0"); msf = new MemStoreFlusher(conf, null); @@ -59,8 +53,8 @@ public void setUp() throws Exception { @Test public void testReplaceDelayedFlushEntry() { - RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setRegionId(1).setReplicaId(0).build(); + RegionInfo hri = + 
RegionInfoBuilder.newBuilder(TableName.valueOf(name)).setRegionId(1).setReplicaId(0).build(); HRegion r = mock(HRegion.class); doReturn(hri).when(r).getRegionInfo(); @@ -77,8 +71,8 @@ public void testReplaceDelayedFlushEntry() { @Test public void testNotReplaceDelayedFlushEntryWhichExpired() { - RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setRegionId(1).setReplicaId(0).build(); + RegionInfo hri = + RegionInfoBuilder.newBuilder(TableName.valueOf(name)).setRegionId(1).setReplicaId(0).build(); HRegion r = mock(HRegion.class); doReturn(hri).when(r).getRegionInfo(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java index cd91d39c77b0..dbf8e4cbc891 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java @@ -19,9 +19,9 @@ import static org.apache.hadoop.hbase.regionserver.MemStoreLAB.CHUNK_SIZE_KEY; import static org.apache.hadoop.hbase.regionserver.MemStoreLAB.MAX_ALLOC_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.lang.management.ManagementFactory; import java.nio.ByteBuffer; @@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MultithreadedTestUtil; @@ -48,37 +47,33 @@ import 
org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.collect.Maps; import org.apache.hbase.thirdparty.com.google.common.primitives.Ints; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestMemStoreLAB { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMemStoreLAB.class); - private final static Configuration conf = new Configuration(); private static final byte[] rk = Bytes.toBytes("r1"); private static final byte[] cf = Bytes.toBytes("f"); private static final byte[] q = Bytes.toBytes("q"); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { ChunkCreator.initialize(1 * 1024, false, 50 * 1024000L, 0.2f, MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT, null, MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { long globalMemStoreLimit = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() @@ -111,12 +106,12 @@ public void testLABRandomAllocation() { expectedOff = Bytes.SIZEOF_INT; lastBuffer = newKv.getBuffer(); int chunkId = newKv.getBuffer().getInt(0); - assertTrue("chunkid should be different", chunkId != lastChunkId); + assertTrue(chunkId != lastChunkId, "chunkid should be different"); lastChunkId = 
chunkId; } assertEquals(expectedOff, newKv.getOffset()); - assertTrue("Allocation overruns buffer", - newKv.getOffset() + size <= newKv.getBuffer().capacity()); + assertTrue(newKv.getOffset() + size <= newKv.getBuffer().capacity(), + "Allocation overruns buffer"); expectedOff += size; } } @@ -126,7 +121,7 @@ public void testLABLargeAllocation() { MemStoreLAB mslab = new MemStoreLABImpl(); KeyValue kv = new KeyValue(rk, cf, q, new byte[2 * 1024 * 1024]); Cell newCell = mslab.copyCellInto(kv); - assertNull("2MB allocation shouldn't be satisfied by LAB.", newCell); + assertNull(newCell, "2MB allocation shouldn't be satisfied by LAB."); } /** @@ -181,9 +176,9 @@ public void doAnAction() throws Exception { mapsByChunk.put(rec.alloc, mapForThisByteArray); } AllocRecord oldVal = mapForThisByteArray.put(rec.offset, rec); - assertNull("Already had an entry " + oldVal + " for allocation " + rec, oldVal); + assertNull(oldVal, "Already had an entry " + oldVal + " for allocation " + rec); } - assertEquals("Sanity check test", sizeCounted, totalAllocated.get()); + assertEquals(sizeCounted, totalAllocated.get(), "Sanity check test"); // Now check each byte array to make sure allocations don't overlap for (Map allocsInChunk : mapsByChunk.values()) { @@ -192,8 +187,8 @@ public void doAnAction() throws Exception { int expectedOff = Bytes.SIZEOF_INT; for (AllocRecord alloc : allocsInChunk.values()) { assertEquals(expectedOff, alloc.offset); - assertTrue("Allocation overruns buffer", - alloc.offset + alloc.size <= alloc.alloc.capacity()); + assertTrue(alloc.offset + alloc.size <= alloc.alloc.capacity(), + "Allocation overruns buffer"); expectedOff += alloc.size; } } @@ -257,18 +252,17 @@ public void testLABChunkQueue() throws Exception { } } // none of the chunkIds would have been returned back - assertTrue("All the chunks must have been cleared", - ChunkCreator.instance.numberOfMappedChunks() != 0); + assertTrue(ChunkCreator.instance.numberOfMappedChunks() != 0, + "All the chunks 
must have been cleared"); Set chunkIds = new HashSet(mslab.chunks); int pooledChunksNum = mslab.getPooledChunks().size(); // close the mslab mslab.close(); // make sure all chunks where reclaimed back to pool int queueLength = mslab.getNumOfChunksReturnedToPool(chunkIds); - assertTrue( + assertTrue(pooledChunksNum - queueLength == 0, "All chunks in chunk queue should be reclaimed or removed" - + " after mslab closed but actually: " + (pooledChunksNum - queueLength), - pooledChunksNum - queueLength == 0); + + " after mslab closed but actually: " + (pooledChunksNum - queueLength)); } finally { ChunkCreator.instance = oldInstance; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java index e64b4cc60471..e91e82aceb77 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Arrays; @@ -27,7 +27,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -42,23 +41,19 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; -import org.junit.After; -import 
org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test the {@link MemStoreCompactorSegmentsIterator} and {@link MemStoreMergerSegmentsIterator} * class, Test for bug : HBASE-22324 */ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMemStoreSegmentsIterator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMemStoreSegmentsIterator.class); - private static String TABLE = "test_mscsi"; private static String FAMILY = "f"; private static String COLUMN = "c"; @@ -71,7 +66,7 @@ public class TestMemStoreSegmentsIterator { private HRegion region; private HStore store; - @Before + @BeforeEach public void setup() throws IOException { Configuration conf = new Configuration(); HBaseTestingUtil hbaseUtility = new HBaseTestingUtil(conf); @@ -144,7 +139,7 @@ protected void verifyNext(MemStoreSegmentsIterator iterator) { assertEquals(GREATER_THAN_INTEGER_MAX_VALUE_SEQ_ID, secondCell.getSequenceId()); } - @After + @AfterEach public void tearDown() throws Exception { EnvironmentEdgeManagerTestHelper.reset(); if (region != null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemstoreLABWithoutPool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemstoreLABWithoutPool.java index 9b32558edf12..f89c78f28dd4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemstoreLABWithoutPool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemstoreLABWithoutPool.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; 
+import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.lang.management.ManagementFactory; import java.nio.ByteBuffer; @@ -29,33 +29,28 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Ignore // See HBASE-19742 for issue on reenabling. -@Category({ RegionServerTests.class, SmallTests.class }) +@Disabled // See HBASE-19742 for issue on reenabling. 
+@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMemstoreLABWithoutPool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMemstoreLABWithoutPool.class); - private final static Configuration conf = new Configuration(); private static final byte[] rk = Bytes.toBytes("r1"); private static final byte[] cf = Bytes.toBytes("f"); private static final byte[] q = Bytes.toBytes("q"); - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { long globalMemStoreLimit = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * 0.8); @@ -89,12 +84,12 @@ public void testLABRandomAllocation() { expectedOff = Bytes.SIZEOF_INT; lastBuffer = newKv.getBuffer(); int chunkId = newKv.getBuffer().getInt(0); - assertTrue("chunkid should be different", chunkId != lastChunkId); + assertTrue(chunkId != lastChunkId, "chunkid should be different"); lastChunkId = chunkId; } assertEquals(expectedOff, newKv.getOffset()); - assertTrue("Allocation overruns buffer", - newKv.getOffset() + size <= newKv.getBuffer().capacity()); + assertTrue(newKv.getOffset() + size <= newKv.getBuffer().capacity(), + "Allocation overruns buffer"); expectedOff += size; } } @@ -148,8 +143,8 @@ public void testLABChunkQueueWithMultipleMSLABs() throws Exception { mslab[i].close(); } // all of the chunkIds would have been returned back - assertTrue("All the chunks must have been cleared", - ChunkCreator.instance.numberOfMappedChunks() == 0); + assertTrue(ChunkCreator.instance.numberOfMappedChunks() == 0, + "All the chunks must have been cleared"); } private Thread getChunkQueueTestThread(final MemStoreLABImpl mslab, String threadName, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java index 59f16f56dca8..279e1f8b49e3 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasProperty; import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -35,10 +35,9 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.TableNameTestRule; +import org.apache.hadoop.hbase.TableNameTestExtension; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfo; @@ -55,41 +54,37 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestMergesSplitsAddToTracker { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMergesSplitsAddToTracker.class); - private static 
HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final String FAMILY_NAME_STR = "info"; private static final byte[] FAMILY_NAME = Bytes.toBytes(FAMILY_NAME_STR); - @Rule - public TableNameTestRule name = new TableNameTestRule(); + @RegisterExtension + public TableNameTestExtension name = new TableNameTestExtension(); - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setup() { StoreFileTrackerForTest.clear(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsHeapMemoryManager.java index 974d6485377e..5d44de02ae8d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsHeapMemoryManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsHeapMemoryManager.java @@ -17,35 +17,30 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Unit test version of rs metrics tests. 
*/ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsHeapMemoryManager { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsHeapMemoryManager.class); - public static MetricsAssertHelper HELPER = CompatibilitySingletonFactory.getInstance(MetricsAssertHelper.class); private MetricsHeapMemoryManager hmm; private MetricsHeapMemoryManagerSource source; - @Before + @BeforeEach public void setUp() { hmm = new MetricsHeapMemoryManager(); source = hmm.getMetricsSource(); @@ -53,7 +48,7 @@ public void setUp() { @Test public void testConstuctor() { - assertNotNull("There should be a hadoop1/hadoop2 metrics source", source); + assertNotNull(source, "There should be a hadoop1/hadoop2 metrics source"); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsJvm.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsJvm.java index 6c1ec1dc0acb..7fe87f11d252 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsJvm.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsJvm.java @@ -17,17 +17,16 @@ */ package org.apache.hadoop.hbase.regionserver; -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.net.HttpURLConnection; import java.net.URL; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.testclassification.MetricsTests; @@ -38,22 +37,19 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MetricsTests.class, SmallTests.class }) +@Tag(MetricsTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsJvm { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsJvm.class); private final static HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static Configuration conf; - @BeforeClass + @BeforeAll public static void before() throws Exception { conf = UTIL.getConfiguration(); // The master info server does not run in tests by default. 
@@ -62,7 +58,7 @@ public static void before() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void after() throws Exception { UTIL.shutdownMiniCluster(); } @@ -102,12 +98,12 @@ private Pair getUrlContent(URL url) throws Exception { private void assertReFind(String re, String value) { Pattern p = Pattern.compile(re); Matcher m = p.matcher(value); - assertTrue("'" + p + "' does not match " + value, m.find()); + assertTrue(m.find(), "'" + p + "' does not match " + value); } private void assertNotFind(String re, String value) { Pattern p = Pattern.compile(re); Matcher m = p.matcher(value); - assertFalse("'" + p + "' should not match " + value, m.find()); + assertFalse(m.find(), "'" + p + "' should not match " + value); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java index 382aed2ea941..87b2994934f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java @@ -19,21 +19,16 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsRegion { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsRegion.class); - public 
MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java index 8409c57912c9..55bb994f5123 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.metrics.MetricRegistries; import org.apache.hadoop.hbase.quotas.RpcThrottlingException; import org.apache.hadoop.hbase.regionserver.metrics.MetricsTableRequests; @@ -33,23 +32,19 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.JvmPauseMonitor; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Unit test version of rs metrics tests. 
*/ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsRegionServer { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsRegionServer.class); - public static MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class); @@ -57,19 +52,19 @@ public class TestMetricsRegionServer { private MetricsRegionServer rsm; private MetricsRegionServerSource serverSource; - @BeforeClass + @BeforeAll public static void classSetUp() { HELPER.init(); } - @Before + @BeforeEach public void setUp() { wrapper = new MetricsRegionServerWrapperStub(); rsm = new MetricsRegionServer(wrapper, new Configuration(false), null); serverSource = rsm.getMetricsSource(); } - @After + @AfterEach public void tearDown() { // Clean up global registries after each test to avoid interference MetricRegistries.global().clear(); @@ -160,9 +155,9 @@ public void testWrapperSource() { @Test public void testConstuctor() { - assertNotNull("There should be a hadoop1/hadoop2 metrics source", rsm.getMetricsSource()); - assertNotNull("The RegionServerMetricsWrapper should be accessable", - rsm.getRegionServerWrapper()); + assertNotNull(rsm.getMetricsSource(), "There should be a hadoop1/hadoop2 metrics source"); + assertNotNull(rsm.getRegionServerWrapper(), + "The RegionServerMetricsWrapper should be accessable"); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServerAggregate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServerAggregate.java index 3b63541758ba..33ac8a190a85 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServerAggregate.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServerAggregate.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; 
-import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -27,7 +27,6 @@ import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HDFSBlocksDistribution; @@ -40,21 +39,17 @@ import org.apache.hadoop.hbase.util.ManualEnvironmentEdge; import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.hbase.wal.WALProvider; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.stubbing.Answer; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -@Category({ SmallTests.class, RegionServerTests.class }) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestMetricsRegionServerAggregate { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsRegionServerAggregate.class); - @Test public void test() { AtomicInteger retVal = new AtomicInteger(0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java index 77aa93879607..d64c0186a4bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionWrapperTableDescriptorHash.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; @@ -35,29 +34,25 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsRegionWrapperTableDescriptorHash { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class); - private HBaseTestingUtil testUtil; private Configuration conf; - @Before + @BeforeEach public void setUp() throws Exception { conf = HBaseConfiguration.create(); testUtil = new HBaseTestingUtil(conf); } - @After + @AfterEach public void tearDown() throws Exception { if (testUtil != null) { testUtil.cleanupTestDir(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableAggregate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableAggregate.java index bee3fe7c363c..9d45df4fd59b 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableAggregate.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableAggregate.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.concurrent.CyclicBarrier; @@ -27,25 +27,20 @@ import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestMetricsTableAggregate { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsTableAggregate.class); - private static final Logger LOG = LoggerFactory.getLogger(TestMetricsTableAggregate.class); private static MetricsAssertHelper HELPER = @@ -60,12 +55,12 @@ public class TestMetricsTableAggregate { private MetricsRegionServer rsm; private MetricsTableAggregateSource agg; - @BeforeClass + @BeforeAll public static void classSetUp() { HELPER.init(); } - @Before + @BeforeEach public void setUp() { tableWrapper = new MetricsTableWrapperStub(tableName); mt = new MetricsTable(tableWrapper); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableMetricsMap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableMetricsMap.java index 2f6dc3db0b87..4d45e6e65f6b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableMetricsMap.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableMetricsMap.java @@ -17,25 +17,20 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsTableMetricsMap { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsTableMetricsMap.class); - private String tableName = "testTableMetricsMap"; private MetricsTableWrapperStub tableWrapper; @@ -44,7 +39,7 @@ public class TestMetricsTableMetricsMap { private MetricsRegionServer rsm; private MetricsTableAggregateSourceImpl agg; - @Before + @BeforeEach public void setUp() { Configuration conf = new Configuration(); @@ -68,7 +63,7 @@ public void testMetricsMap() throws InterruptedException { rsm.updateCompaction(tableName, true, 100, 200, 300, 400, 500); int metricsMapSize = 
agg.getMetricsRegistry().getMetricsMap().size(); - assertTrue("table metrics added then metricsMapSize should larger than 0", metricsMapSize > 0); + assertTrue(metricsMapSize > 0, "table metrics added then metricsMapSize should larger than 0"); // just for metrics update Thread.sleep(1000); @@ -76,6 +71,6 @@ public void testMetricsMap() throws InterruptedException { agg.deleteTableSource(tableName); metricsMapSize = agg.getMetricsRegistry().getMetricsMap().size(); - assertEquals("table metrics all deleted then metricsSize should be 0", 0, metricsMapSize); + assertEquals(0, metricsMapSize, "table metrics all deleted then metricsSize should be 0"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java index 8c47c96f1009..46b12c22febe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Optional; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.metrics.Metric; import org.apache.hadoop.hbase.metrics.MetricRegistries; @@ -35,27 +34,23 @@ import org.apache.hadoop.hbase.regionserver.metrics.MetricsTableRequests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import 
org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMetricsTableRequests { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsTableRequests.class); - @Test public void testMetricsTableLatencies() { TableName tn1 = TableName.valueOf("table1"); TableName tn2 = TableName.valueOf("table2"); MetricsTableRequests requests1 = new MetricsTableRequests(tn1, new Configuration()); MetricsTableRequests requests2 = new MetricsTableRequests(tn2, new Configuration()); - assertTrue("'requests' is actually " + requests1.getClass(), - requests1 instanceof MetricsTableRequests); - assertTrue("'requests' is actually " + requests2.getClass(), - requests2 instanceof MetricsTableRequests); + assertTrue(requests1 instanceof MetricsTableRequests, + "'requests' is actually " + requests1.getClass()); + assertTrue(requests2 instanceof MetricsTableRequests, + "'requests' is actually " + requests2.getClass()); MetricRegistryInfo info1 = requests1.getMetricRegistryInfo(); MetricRegistryInfo info2 = requests2.getMetricRegistryInfo(); @@ -101,8 +96,8 @@ public void testTableQueryMeterSwitch() { // disable assertFalse(enableTableQueryMeter); MetricsTableRequests requests = new MetricsTableRequests(tn1, conf); - assertTrue("'requests' is actually " + requests.getClass(), - requests instanceof MetricsTableRequests); + assertTrue(requests instanceof MetricsTableRequests, + "'requests' is actually " + requests.getClass()); MetricRegistryInfo info = requests.getMetricRegistryInfo(); Optional registry = MetricRegistries.global().get(info); @@ -118,8 +113,8 @@ public void testTableQueryMeterSwitch() { MetricsTableRequests.ENABLE_TABLE_QUERY_METER_METRICS_KEY_DEFAULT); assertTrue(enableTableQueryMeter); requests 
= new MetricsTableRequests(tn1, conf); - assertTrue("'requests' is actually " + requests.getClass(), - requests instanceof MetricsTableRequests); + assertTrue(requests instanceof MetricsTableRequests, + "'requests' is actually " + requests.getClass()); info = requests.getMetricRegistryInfo(); registry = MetricRegistries.global().get(info); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsUserAggregate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsUserAggregate.java index 8e1d126bd5cc..18b4ff745635 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsUserAggregate.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsUserAggregate.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.security.PrivilegedAction; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.regionserver.metrics.MetricsTableRequests; @@ -33,19 +32,15 @@ import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) public class TestMetricsUserAggregate { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetricsUserAggregate.class); - private static MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class); @@ -54,12 +49,12 @@ public class TestMetricsUserAggregate { private MetricsUserAggregate userAgg; private TableName tableName = TableName.valueOf("testUserAggregateMetrics"); - @BeforeClass + @BeforeAll public static void classSetUp() { HELPER.init(); } - @Before + @BeforeEach public void setUp() { wrapper = new MetricsRegionServerWrapperStub(); Configuration conf = HBaseConfiguration.create(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java index 17a33f618786..0ae08267ce82 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java @@ -18,15 +18,14 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.HBaseTestingUtil.COLUMNS; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.TableName; @@ -44,23 +43,18 @@ import org.apache.hadoop.hbase.util.Bytes; import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ManualEnvironmentEdge; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test Minimum Versions feature (HBASE-4071). */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestMinVersions { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMinVersions.class); - HBaseTestingUtil hbu = new HBaseTestingUtil(); private final byte[] T0 = Bytes.toBytes("0"); private final byte[] T1 = Bytes.toBytes("1"); @@ -70,21 +64,24 @@ public class TestMinVersions { private final byte[] T5 = Bytes.toBytes("5"); private final byte[] c0 = COLUMNS[0]; - - @Rule - public TestName name = new TestName(); + private String name; /** * Verify behavior of getClosestBefore(...) 
*/ + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } + @Test public void testGetClosestBefore() throws Exception { ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(c0).setMinVersions(1) .setMaxVersions(1000).setTimeToLive(1).setKeepDeletedCells(KeepDeletedCells.FALSE).build(); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); try { @@ -136,8 +133,8 @@ public void testStoreMemStore() throws Exception { ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(c0).setMinVersions(3) .setMaxVersions(1000).setTimeToLive(1).setKeepDeletedCells(KeepDeletedCells.FALSE).build(); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); // 2s in the past @@ -195,8 +192,8 @@ public void testDelete() throws Exception { ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(c0).setMinVersions(3) .setMaxVersions(1000).setTimeToLive(1).setKeepDeletedCells(KeepDeletedCells.FALSE).build(); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -258,8 +255,8 @@ public void testMemStore() throws Exception { ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(c0).setMinVersions(2) 
.setMaxVersions(1000).setTimeToLive(1).setKeepDeletedCells(KeepDeletedCells.FALSE).build(); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); // 2s in the past @@ -337,8 +334,8 @@ public void testBaseCase() throws Exception { ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(c0).setMinVersions(2) .setMaxVersions(1000).setTimeToLive(1).setKeepDeletedCells(KeepDeletedCells.FALSE).build(); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); try { @@ -438,8 +435,8 @@ public void testFilters() throws Exception { cfdList.add(cfd); cfdList.add(cfd2); - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamilies(cfdList).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamilies(cfdList).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); // 2s in the past @@ -525,8 +522,8 @@ public void testMinVersionsWithKeepDeletedCellsTTL() throws Exception { private void verifyVersionedCellKeyValues(int ttl, ColumnFamilyDescriptor cfd) throws IOException { - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) - .setColumnFamily(cfd).build(); + TableDescriptor htd = + TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).setColumnFamily(cfd).build(); HRegion region = hbu.createLocalHRegion(htd, null, null); @@ -551,9 +548,9 @@ private void verifyVersionedCellKeyValues(int ttl, 
ColumnFamilyDescriptor cfd) region.flush(true); region.compact(true); - Assert.assertEquals(startTS, EnvironmentEdgeManager.currentTime()); + assertEquals(startTS, EnvironmentEdgeManager.currentTime()); long expiredTime = EnvironmentEdgeManager.currentTime() - ts - 4; - Assert.assertTrue("TTL for T1 has expired", expiredTime < (ttl * 1000)); + assertTrue(expiredTime < (ttl * 1000), "TTL for T1 has expired"); // check that nothing was purged yet verifyBeforeCompaction(region, ts); @@ -593,13 +590,13 @@ private void verifyAfterTtl(HRegion region, long ts) throws IOException { get.readAllVersions(); get.setTimestamp(ts - 3); result = region.get(get); - Assert.assertEquals(result.getColumnCells(c0, c0).size(), 0); + assertEquals(result.getColumnCells(c0, c0).size(), 0); get = new Get(T1); get.readAllVersions(); get.setTimeRange(0, ts - 2); result = region.get(get); - Assert.assertEquals(result.getColumnCells(c0, c0).size(), 0); + assertEquals(result.getColumnCells(c0, c0).size(), 0); } private void verifyBeforeCompaction(HRegion region, long ts) throws IOException { @@ -659,8 +656,8 @@ private void checkResult(Result r, byte[] col, byte[]... 
vals) { for (int i = 0; i < vals.length; i++) { String expected = Bytes.toString(vals[i]); String actual = Bytes.toString(CellUtil.cloneValue(kvs.get(i))); - assertTrue(expected + " was expected but doesn't match " + actual, - CellUtil.matchingValue(kvs.get(i), vals[i])); + assertTrue(CellUtil.matchingValue(kvs.get(i), vals[i]), + expected + " was expected but doesn't match " + actual); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java index 68885a7bff0b..ed623ba00176 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -29,17 +28,13 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALEdit; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMiniBatchOperationInProgress { - @ClassRule - public static final HBaseClassTestRule 
CLASS_RULE = - HBaseClassTestRule.forClass(TestMiniBatchOperationInProgress.class); - @Test public void testMiniBatchOperationInProgressMethods() { Pair[] operations = new Pair[10]; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java index 654df41cdc73..55214616ebf1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java @@ -20,8 +20,8 @@ import static org.apache.hadoop.hbase.HBaseTestingUtil.START_KEY_BYTES; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam2; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Collection; @@ -29,7 +29,6 @@ import java.util.Optional; import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTestConst; @@ -48,27 +47,21 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Test minor compactions */ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMinorCompaction { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMinorCompaction.class); - - @Rule - public TestName name = new TestName(); + private String name; private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); private static Configuration CONF = UTIL.getConfiguration(); @@ -94,7 +87,7 @@ public CompactionRequestImpl selectCompaction(Collection candidateFi } } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() { // Set cache flush size to 1MB CONF.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024); @@ -114,15 +107,16 @@ public static void setUpBeforeClass() { COL2 = Bytes.toBytes("column2"); } - @Before - public void setUp() throws Exception { - this.htd = UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()), + @BeforeEach + public void setUp(TestInfo testInfo) throws Exception { + this.name = testInfo.getTestMethod().get().getName(); + this.htd = UTIL.createTableDescriptor(TableName.valueOf(name), ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED); this.r = UTIL.createLocalHRegion(htd, null, null); } - @After + @AfterEach public void tearDown() throws Exception { WAL wal = ((HRegion) r).getWAL(); ((HRegion) r).close(); @@ -240,15 +234,15 @@ private void testMinorCompactionWithDelete(Delete delete, int expectedResultsAft // do a compaction HStore store2 = r.getStore(fam2); int numFiles1 = store2.getStorefiles().size(); - assertTrue("Was expecting to see 4 store files", numFiles1 > COMPACTION_THRESHOLD); // > 3 + assertTrue(numFiles1 > COMPACTION_THRESHOLD, "Was expecting to see 4 store files"); // > 3 Optional compaction = store2.requestCompaction(); 
assertTrue(compaction.isPresent()); store2.compact(compaction.get(), NoLimitThroughputController.INSTANCE, null); // = 3 int numFiles2 = store2.getStorefiles().size(); // Check that we did compact - assertTrue("Number of store files should go down", numFiles1 > numFiles2); + assertTrue(numFiles1 > numFiles2, "Number of store files should go down"); // Check that it was a minor compaction. - assertTrue("Was not supposed to be a major compaction", numFiles2 > 1); + assertTrue(numFiles2 > 1, "Was not supposed to be a major compaction"); // Make sure that we have only deleted family2 from secondRowBytes result = r.get(new Get(SECOND_ROW_BYTES).addColumn(fam2, COL2).readVersions(100)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java index 1de37bcb018f..818dc1b9d422 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -33,6 +32,7 @@ import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.Stream; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; @@ -50,8 +50,8 @@ import 
org.apache.hadoop.hbase.io.hfile.BlockCacheFactory; import org.apache.hadoop.hbase.util.BloomFilterUtil; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Test; -import org.junit.runners.Parameterized.Parameter; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -107,14 +107,18 @@ public abstract class TestMultiColumnScanner { private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); - @Parameter(0) - public Compression.Algorithm comprAlgo; + private final Compression.Algorithm comprAlgo; - @Parameter(1) - public BloomType bloomType; + private final BloomType bloomType; - @Parameter(2) - public DataBlockEncoding dataBlockEncoding; + private final DataBlockEncoding dataBlockEncoding; + + protected TestMultiColumnScanner(Compression.Algorithm comprAlgo, BloomType bloomType, + DataBlockEncoding dataBlockEncoding) { + this.comprAlgo = comprAlgo; + this.bloomType = bloomType; + this.dataBlockEncoding = dataBlockEncoding; + } // Some static sanity-checking. static { @@ -125,18 +129,18 @@ public abstract class TestMultiColumnScanner { assertTrue(TIMESTAMPS[i] < TIMESTAMPS[i + 1]); } - public static Collection generateParams(Compression.Algorithm algo, + public static Stream generateParams(Compression.Algorithm algo, boolean useDataBlockEncoding) { - List parameters = new ArrayList<>(); + List parameters = new ArrayList<>(); for (BloomType bloomType : BloomType.values()) { DataBlockEncoding dataBlockEncoding = useDataBlockEncoding ? 
DataBlockEncoding.PREFIX : DataBlockEncoding.NONE; - parameters.add(new Object[] { algo, bloomType, dataBlockEncoding }); + parameters.add(Arguments.of(algo, bloomType, dataBlockEncoding)); } - return parameters; + return parameters.stream(); } - @Test + @TestTemplate public void testMultiColumnScanner() throws IOException { TEST_UTIL.getConfiguration().setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10); HRegion region = TEST_UTIL.createTestRegion(TABLE_NAME, @@ -233,11 +237,10 @@ public void testMultiColumnScanner() throws IOException { deleteInfo = "; last timestamp when row/column " + rowQual + " was deleted: " + lastDelTS; } - assertTrue( - "Scanner returned additional key/value: " + kv + ", " + queryInfo + deleteInfo + ";", - kvPos < kvs.size()); - assertTrue("Scanner returned wrong key/value; " + queryInfo + deleteInfo + ";", - PrivateCellUtil.equalsIgnoreMvccVersion(kvs.get(kvPos), kv)); + assertTrue(kvPos < kvs.size(), + "Scanner returned additional key/value: " + kv + ", " + queryInfo + deleteInfo + ";"); + assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(kvs.get(kvPos), kv), + "Scanner returned wrong key/value; " + queryInfo + deleteInfo + ";"); ++kvPos; ++numResults; } @@ -245,15 +248,14 @@ public void testMultiColumnScanner() throws IOException { } for (; kvPos < kvs.size(); ++kvPos) { KeyValue remainingKV = kvs.get(kvPos); - assertFalse( + assertFalse(matchesQuery(remainingKV, qualSet, maxVersions, lastDelTimeMap), "Matching column not returned by scanner: " + remainingKV + ", " + queryInfo - + ", results returned: " + numResults, - matchesQuery(remainingKV, qualSet, maxVersions, lastDelTimeMap)); + + ", results returned: " + numResults); } } } - assertTrue("This test is supposed to delete at least some row/column " + "pairs", - lastDelTimeMap.size() > 0); + assertTrue(lastDelTimeMap.size() > 0, + "This test is supposed to delete at least some row/column " + "pairs"); LOG.info("Number of row/col pairs deleted at least once: " + lastDelTimeMap.size()); 
HBaseTestingUtil.closeRegionAndWAL(region); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndNoDataEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndNoDataEncoding.java index 0b62da322624..564797f67f9b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndNoDataEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndNoDataEncoding.java @@ -17,31 +17,30 @@ */ package org.apache.hadoop.hbase.regionserver; -import java.util.Collection; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; /** * Test case for Compression.Algorithm.GZ and no use data block encoding. 
* @see org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner */ -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: algo={0}, bloomType={1}, dataBlockEncoding={2}") public class TestMultiColumnScannerWithAlgoGZAndNoDataEncoding extends TestMultiColumnScanner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiColumnScannerWithAlgoGZAndNoDataEncoding.class); + public TestMultiColumnScannerWithAlgoGZAndNoDataEncoding(Algorithm comprAlgo, BloomType bloomType, + DataBlockEncoding dataBlockEncoding) { + super(comprAlgo, bloomType, dataBlockEncoding); + } - @Parameters - public static Collection parameters() { + public static Stream parameters() { return TestMultiColumnScanner.generateParams(Algorithm.GZ, false); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndUseDataEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndUseDataEncoding.java index 0186e5fa5209..e4ea01bf72e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndUseDataEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithAlgoGZAndUseDataEncoding.java @@ -17,31 +17,30 @@ */ package org.apache.hadoop.hbase.regionserver; -import java.util.Collection; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import 
org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; /** * Test case for Compression.Algorithm.GZ and use data block encoding. * @see org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner */ -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: algo={0}, bloomType={1}, dataBlockEncoding={2}") public class TestMultiColumnScannerWithAlgoGZAndUseDataEncoding extends TestMultiColumnScanner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiColumnScannerWithAlgoGZAndUseDataEncoding.class); + public TestMultiColumnScannerWithAlgoGZAndUseDataEncoding(Algorithm comprAlgo, + BloomType bloomType, DataBlockEncoding dataBlockEncoding) { + super(comprAlgo, bloomType, dataBlockEncoding); + } - @Parameters - public static Collection parameters() { + public static Stream parameters() { return TestMultiColumnScanner.generateParams(Algorithm.GZ, true); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndNoDataEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndNoDataEncoding.java index e6291bead85a..5ed457a2c4cb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndNoDataEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndNoDataEncoding.java @@ -17,31 +17,30 @@ */ package org.apache.hadoop.hbase.regionserver; -import java.util.Collection; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import 
java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; /** * Test case for Compression.Algorithm.NONE and no use data block encoding. * @see org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner */ -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: algo={0}, bloomType={1}, dataBlockEncoding={2}") public class TestMultiColumnScannerWithNoneAndNoDataEncoding extends TestMultiColumnScanner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiColumnScannerWithNoneAndNoDataEncoding.class); + public TestMultiColumnScannerWithNoneAndNoDataEncoding(Algorithm comprAlgo, BloomType bloomType, + DataBlockEncoding dataBlockEncoding) { + super(comprAlgo, bloomType, dataBlockEncoding); + } - @Parameters - public static Collection parameters() { + public static Stream parameters() { return TestMultiColumnScanner.generateParams(Algorithm.NONE, false); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndUseDataEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndUseDataEncoding.java index abffd4b6485d..d53ec1397e60 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndUseDataEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScannerWithNoneAndUseDataEncoding.java @@ -17,31 +17,30 @@ */ package org.apache.hadoop.hbase.regionserver; -import java.util.Collection; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; /** - * Test case for Compression.Algorithm.NONE and no use data block encoding + * Test case for Compression.Algorithm.NONE and use data block encoding
* @see org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner */ -@RunWith(Parameterized.class) -@Category({ RegionServerTests.class, LargeTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: algo={0}, bloomType={1}, dataBlockEncoding={2}") public class TestMultiColumnScannerWithNoneAndUseDataEncoding extends TestMultiColumnScanner { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiColumnScannerWithNoneAndUseDataEncoding.class); + public TestMultiColumnScannerWithNoneAndUseDataEncoding(Algorithm comprAlgo, BloomType bloomType, + DataBlockEncoding dataBlockEncoding) { + super(comprAlgo, bloomType, dataBlockEncoding); + } - @Parameters - public static Collection parameters() { + public static Stream parameters() { return TestMultiColumnScanner.generateParams(Algorithm.NONE, true); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java index e11bae99fccf..523263bf5801 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java @@ -17,34 +17,31 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; -import java.util.Arrays; -import java.util.List; import 
java.util.concurrent.BlockingDeque; import java.util.concurrent.LinkedBlockingDeque; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -62,14 +59,10 @@ * Tests logging of large batch commands via Multi. 
Tests are fast, but uses a mini-cluster (to test * via "Multi" commands) so classified as MediumTests */ -@RunWith(Parameterized.class) -@Category(MediumTests.class) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: rejectLargeBatchOp={0}") public class TestMultiLogThreshold { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiLogThreshold.class); - private static final TableName NAME = TableName.valueOf("tableName"); private static final byte[] TEST_FAM = Bytes.toBytes("fam"); @@ -81,12 +74,14 @@ public class TestMultiLogThreshold { private org.apache.logging.log4j.core.Appender appender; - @Parameterized.Parameter - public static boolean rejectLargeBatchOp; + private final boolean rejectLargeBatchOp; + + public static Stream parameters() { + return Stream.of(Arguments.of(false), Arguments.of(true)); + } - @Parameterized.Parameters - public static List params() { - return Arrays.asList(new Object[] { false }, new Object[] { true }); + public TestMultiLogThreshold(boolean rejectLargeBatchOp) { + this.rejectLargeBatchOp = rejectLargeBatchOp; } private final class LevelAndMessage { @@ -104,7 +99,7 @@ public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) { // log4j2 will reuse the LogEvent so we need to copy the level and message out. 
private BlockingDeque logs = new LinkedBlockingDeque<>(); - @Before + @BeforeEach public void setupTest() throws Exception { util = new HBaseTestingUtil(); conf = util.getConfiguration(); @@ -132,7 +127,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { .getLogger(RSRpcServices.class)).addAppender(appender); } - @After + @AfterEach public void tearDown() throws Exception { ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager .getLogger(RSRpcServices.class)).removeAppender(appender); @@ -193,7 +188,7 @@ private void assertLogBatchWarnings(boolean expected) { assertEquals(expected, actual); } - @Test + @TestTemplate public void testMultiLogThresholdRegionActions() throws ServiceException, IOException { try { sendMultiRequest(threshold + 1, ActionType.REGION_ACTIONS); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java index 43e5cfcbdb3c..c491d9617042 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java @@ -17,28 +17,25 @@ */ package org.apache.hadoop.hbase.regionserver; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; /** * This is a hammer test that verifies MultiVersionConcurrencyControl in a multiple writer single * reader scenario. */ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestMultiVersionConcurrencyControl { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiVersionConcurrencyControl.class); - static class Writer implements Runnable { final AtomicBoolean finished; final MultiVersionConcurrencyControl mvcc; @@ -132,9 +129,9 @@ public void run() { } // check failure. - Assert.assertFalse(readerFailed.get()); + assertFalse(readerFailed.get()); for (int i = 0; i < n; ++i) { - Assert.assertTrue(statuses[i].get()); + assertTrue(statuses[i].get()); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControlBasic.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControlBasic.java index 7fcd35c5bbcb..0070fd48904f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControlBasic.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControlBasic.java @@ -17,26 +17,21 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Very basic tests. * @see TestMultiVersionConcurrencyControl for more. 
*/ -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestMultiVersionConcurrencyControlBasic { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiVersionConcurrencyControlBasic.class); - @Test public void testSimpleMvccOps() { MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMutateRowsRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMutateRowsRecovery.java index 9f5558a28b21..88493da6ff13 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMutateRowsRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMutateRowsRecovery.java @@ -18,11 +18,10 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; import org.apache.hadoop.hbase.TableName; @@ -42,21 +41,17 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import 
org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestMutateRowsRecovery { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMutateRowsRecovery.class); - private SingleProcessHBaseCluster cluster = null; private Connection connection = null; private static final int NB_SERVERS = 3; @@ -70,24 +65,24 @@ public class TestMutateRowsRecovery { static final HBaseTestingUtil TESTING_UTIL = new HBaseTestingUtil(); - @BeforeClass + @BeforeAll public static void before() throws Exception { TESTING_UTIL.startMiniCluster(NB_SERVERS); } - @AfterClass + @AfterAll public static void after() throws Exception { TESTING_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setup() throws IOException { TESTING_UTIL.ensureSomeNonStoppedRegionServersAvailable(NB_SERVERS); this.connection = ConnectionFactory.createConnection(TESTING_UTIL.getConfiguration()); this.cluster = TESTING_UTIL.getMiniHBaseCluster(); } - @After + @AfterEach public void tearDown() throws IOException { if (this.connection != null) { this.connection.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java index 89a5a3c9b2a4..5352c304a5ca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNewVersionBehaviorFromClientSide.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -39,21 +38,17 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ RegionServerTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestNewVersionBehaviorFromClientSide { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNewVersionBehaviorFromClientSide.class); - private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final byte[] ROW = Bytes.toBytes("r1"); @@ -63,22 +58,20 @@ public class TestNewVersionBehaviorFromClientSide { private static final byte[] col1 = Bytes.toBytes("col1"); private static final byte[] col2 = Bytes.toBytes("col2"); private static final byte[] col3 = Bytes.toBytes("col3"); + private String name; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void setDownAfterClass() throws 
Exception { TEST_UTIL.shutdownMiniCluster(); } private Table createTable() throws IOException { - TableName tableName = TableName.valueOf(name.getMethodName()); + TableName tableName = TableName.valueOf(name); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName).setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(FAMILY).setNewVersionBehavior(true).setMaxVersions(3).build()).build(); @@ -86,6 +79,11 @@ private Table createTable() throws IOException { return TEST_UTIL.getConnection().getTable(tableName); } + @BeforeEach + public void setTestName(TestInfo testInfo) { + this.name = testInfo.getTestMethod().get().getName(); + } + @Test public void testPutAndDeleteVersions() throws IOException { try (Table t = createTable()) { @@ -312,9 +310,8 @@ public void testExplicitColum() throws IOException { @Test public void testGetColumnHint() throws IOException { createTable(); - try (Table t = - TEST_UTIL.getConnection().getTableBuilder(TableName.valueOf(name.getMethodName()), null) - .setOperationTimeout(10000).setRpcTimeout(10000).build()) { + try (Table t = TEST_UTIL.getConnection().getTableBuilder(TableName.valueOf(name), null) + .setOperationTimeout(10000).setRpcTimeout(10000).build()) { t.put(new Put(ROW).addColumn(FAMILY, col1, 100, value)); t.put(new Put(ROW).addColumn(FAMILY, col1, 101, value)); t.put(new Put(ROW).addColumn(FAMILY, col1, 102, value)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNotCleanupCompactedFileWhenRegionWarmup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNotCleanupCompactedFileWhenRegionWarmup.java index d666304cc85e..acb4a4d21a4b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNotCleanupCompactedFileWhenRegionWarmup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestNotCleanupCompactedFileWhenRegionWarmup.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import 
static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -37,25 +36,21 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ LargeTests.class, RegionServerTests.class }) +@Tag(LargeTests.TAG) +@Tag(RegionServerTests.TAG) public class TestNotCleanupCompactedFileWhenRegionWarmup { private static final Logger LOG = LoggerFactory.getLogger(TestNotCleanupCompactedFileWhenRegionWarmup.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNotCleanupCompactedFileWhenRegionWarmup.class); - private static HBaseTestingUtil TEST_UTIL; private static Admin admin; private static Table table; @@ -66,7 +61,7 @@ public class TestNotCleanupCompactedFileWhenRegionWarmup { private static byte[] QUALIFIER = Bytes.toBytes("cq"); private static byte[] VALUE = Bytes.toBytes("value"); - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { TEST_UTIL = new HBaseTestingUtil(); // Set the scanner lease to 20min, so the scanner can't be closed by RegionServer @@ -80,12 +75,12 @@ public static void beforeClass() 
throws Exception { admin = TEST_UTIL.getAdmin(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void before() throws Exception { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME); builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)); @@ -94,7 +89,7 @@ public void before() throws Exception { table = TEST_UTIL.getConnection().getTable(TABLE_NAME); } - @After + @AfterEach public void after() throws Exception { admin.disableTable(TABLE_NAME); admin.deleteTable(TABLE_NAME); @@ -110,7 +105,7 @@ public void testRegionWarmup() throws Exception { regions.addAll(rs.getRegions(TABLE_NAME)); } } - assertEquals("Table should only have one region", 1, regions.size()); + assertEquals(1, regions.size(), "Table should only have one region"); HRegion region = regions.get(0); HStore store = region.getStore(FAMILY); From 7dc911c0a48509b19f2d341fbf88bdf2db11e30e Mon Sep 17 00:00:00 2001 From: liuxiaocs7 Date: Tue, 5 May 2026 02:03:17 +0800 Subject: [PATCH 2/2] address comments --- .../hadoop/hbase/regionserver/TestHMobStore.java | 6 +++--- .../hbase/regionserver/TestHRegionTracing.java | 15 ++------------- .../hbase/regionserver/TestMajorCompaction.java | 10 ++++++---- .../regionserver/TestMetricsTableRequests.java | 6 ++++++ 4 files changed, 17 insertions(+), 20 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java index 6f82d63cd287..861d05edbb3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java @@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.Tag; 
import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -77,14 +78,13 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.wal.WALFactory; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Tag(MediumTests.TAG) +@org.junit.jupiter.api.Tag(MediumTests.TAG) public class TestHMobStore { public static final Logger LOG = LoggerFactory.getLogger(TestHMobStore.class); @@ -192,7 +192,7 @@ private void init(Configuration conf, ColumnFamilyDescriptor cfd) throws IOExcep String targetPathName = MobUtils.formatDate(currentDate); byte[] referenceValue = Bytes.toBytes(targetPathName + Path.SEPARATOR + mobFilePath.getName()); - org.apache.hadoop.hbase.Tag tableNameTag = + Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName()); KeyValue kv1 = new KeyValue(row, family, qf1, Long.MAX_VALUE, referenceValue); KeyValue kv2 = new KeyValue(row, family, qf2, Long.MAX_VALUE, referenceValue); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java index 9ec3206296a8..eacb55037f16 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionTracing.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNameTestExtension; @@ -44,7 +43,6 @@ import 
org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.wal.WAL; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -52,8 +50,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.apache.hbase.thirdparty.com.google.common.io.Closeables; - @Tag(RegionServerTests.TAG) @Tag(MediumTests.TAG) public class TestHRegionTracing { @@ -74,8 +70,6 @@ public class TestHRegionTracing { @RegisterExtension public final TableNameTestExtension tableNameRule = new TableNameTestExtension(); - private WAL wal; - private HRegion region; @AfterAll @@ -85,24 +79,19 @@ public static void tearDownAfterClass() throws IOException { @BeforeEach public void setUp() throws Throwable { + traceRule.clearSpans(); TableName tableName = tableNameRule.getTableName(); TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build(); RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build(); ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null, MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT); - wal = HBaseTestingUtil.createWal(UTIL.getConfiguration(), - new Path(UTIL.getDataTestDir(), tableName.getNameAsString()), null); - region = HRegion.createHRegion(info, UTIL.getDataTestDir(), UTIL.getConfiguration(), desc, wal); region = UTIL.createLocalHRegion(info, desc); } @AfterEach public void tearDown() throws IOException { - if (region != null) { - region.close(); - } - Closeables.close(wal, true); + HBaseTestingUtil.closeRegionAndWAL(region); } private void assertSpan(String spanName) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index 7cb842e67b53..4720995eca5e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -87,6 +87,7 @@ public static Stream parameters() { protected Configuration conf = UTIL.getConfiguration(); private String name; + private final String compType; private HRegion r = null; private TableDescriptor htd = null; @@ -100,6 +101,7 @@ public static Stream parameters() { /** constructor */ public TestMajorCompaction(String compType) { super(); + this.compType = compType; // Set cache flush size to 1MB conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024); conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100); @@ -117,10 +119,10 @@ public TestMajorCompaction(String compType) { @BeforeEach public void setUp(TestInfo testInfo) throws Exception { this.name = testInfo.getTestMethod().get().getName(); - this.htd = - UTIL.createTableDescriptor(TableName.valueOf(name.replace('[', 'i').replace(']', 'i')), - ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, - ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED); + this.htd = UTIL.createTableDescriptor( + TableName.valueOf((name + "-" + compType).replace('[', 'i').replace(']', 'i')), + ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, + ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED); this.r = UTIL.createLocalHRegion(htd, null, null); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java index 46b12c22febe..a3463f59126a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsTableRequests.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.regionserver.metrics.MetricsTableRequests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @@ -41,6 +42,11 @@ @Tag(SmallTests.TAG) public class TestMetricsTableRequests { + @AfterEach + public void tearDown() { + MetricRegistries.global().clear(); + } + @Test public void testMetricsTableLatencies() { TableName tn1 = TableName.valueOf("table1");