From 76e4fbdaf7454a6a31a4052fd19facf42751c3d1 Mon Sep 17 00:00:00 2001
From: Nick Dimiduk
Date: Mon, 12 Jan 2015 15:42:27 -0800
Subject: [PATCH] HBASE-11983 HRegion constructors should not create HLog

---
 .../hadoop/hbase/master/MasterFileSystem.java | 4 +-
 .../apache/hadoop/hbase/regionserver/HRegion.java | 112 +--------
 .../hbase/snapshot/RestoreSnapshotHelper.java | 6 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 29 ++-
 .../apache/hadoop/hbase/util/HBaseFsckRepair.java | 2 +-
 .../hadoop/hbase/util/ModifyRegionUtils.java | 65 ++---
 .../org/apache/hadoop/hbase/wal/WALFactory.java | 2 +-
 .../org/apache/hadoop/hbase/HBaseTestCase.java | 13 +-
 .../apache/hadoop/hbase/HBaseTestingUtility.java | 80 +++++--
 .../hbase/client/TestIntraRowPagination.java | 7 +-
 .../coprocessor/TestCoprocessorInterface.java | 8 +-
 .../TestRegionObserverScannerOpenHook.java | 6 +-
 .../coprocessor/TestRegionObserverStacking.java | 4 +-
 .../hbase/filter/TestColumnPrefixFilter.java | 17 +-
 .../hbase/filter/TestDependentColumnFilter.java | 7 +-
 .../org/apache/hadoop/hbase/filter/TestFilter.java | 18 +-
 .../hbase/filter/TestInvocationRecordFilter.java | 2 +-
 .../filter/TestMultipleColumnPrefixFilter.java | 18 +-
 .../hadoop/hbase/io/encoding/TestPrefixTree.java | 4 +-
 .../hfile/TestScannerSelectionUsingKeyRange.java | 5 +-
 .../io/hfile/TestScannerSelectionUsingTTL.java | 5 +-
 .../hadoop/hbase/master/TestMasterFailover.java | 4 +-
 .../hbase/regionserver/TestAtomicOperation.java | 5 +-
 .../hadoop/hbase/regionserver/TestBlocksRead.java | 12 +-
 .../hbase/regionserver/TestColumnSeeking.java | 6 +-
 .../regionserver/TestDefaultCompactSelection.java | 4 +-
 .../regionserver/TestGetClosestAtOrBefore.java | 14 +-
 .../hadoop/hbase/regionserver/TestHRegion.java | 261 ++++++++++++---------
 .../hadoop/hbase/regionserver/TestHRegionInfo.java | 7 +-
 .../hbase/regionserver/TestJoinedScanners.java | 24 --
 .../hadoop/hbase/regionserver/TestKeepDeletes.java | 26 +-
 .../hadoop/hbase/regionserver/TestMinVersions.java | 12 +-
 .../hbase/regionserver/TestMultiColumnScanner.java | 2 +-
 .../regionserver/TestPerColumnFamilyFlush.java | 14 +-
 .../regionserver/TestRegionMergeTransaction.java | 8 +-
 .../hbase/regionserver/TestResettingCounters.java | 6 +-
 .../hbase/regionserver/TestReversibleScanners.java | 12 +-
 .../hbase/regionserver/TestScanWithBloomError.java | 2 +-
 .../hadoop/hbase/regionserver/TestScanner.java | 14 +-
 .../hbase/regionserver/TestSeekOptimizations.java | 2 +-
 .../hbase/regionserver/TestSplitTransaction.java | 10 +-
 .../hadoop/hbase/regionserver/TestWideScanner.java | 3 +-
 .../hadoop/hbase/regionserver/wal/TestFSHLog.java | 13 +-
 .../hbase/regionserver/wal/TestWALReplay.java | 38 ++-
 .../apache/hadoop/hbase/util/TestMergeTable.java | 14 +-
 .../apache/hadoop/hbase/util/TestMergeTool.java | 7 +-
 46 files changed, 427 insertions(+), 507 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index fcfa07f..4d72312 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -513,9 +513,9 @@ public class MasterFileSystem {
       HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
       HTableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);
       setInfoFamilyCachingForMeta(metaDescriptor, false);
-      HRegion meta =
HRegion.createHRegion(metaHRI, rd, c, metaDescriptor); + HRegion meta = HRegion.createHRegion(metaHRI, rd, c, metaDescriptor, null); setInfoFamilyCachingForMeta(metaDescriptor, true); - HRegion.closeHRegion(meta); + meta.close(); } catch (IOException e) { e = e instanceof RemoteException ? ((RemoteException)e).unwrapRemoteException() : e; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 6cf2ce3..b3508a0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -3383,13 +3383,10 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { // * not check the families for validity. * * @param familyMap Map of kvs per family - * @param localizedWriteEntry The WriteEntry of the MVCC for this transaction. - * If null, then this method internally creates a mvcc transaction. - * @param output newly added KVs into memstore + * @param mvccNum The MVCC for this transaction. * @param isInReplay true when adding replayed KVs into memstore * @return the additional memory usage of the memstore caused by the * new entries. - * @throws IOException */ private long applyFamilyMapToMemstore(Map> familyMap, long mvccNum, List memstoreCells, boolean isInReplay) throws IOException { @@ -4673,55 +4670,13 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { // } /** - * Convenience method creating new HRegions. Used by createTable and by the - * bootstrap code in the HMaster constructor. - * Note, this method creates an {@link WAL} for the created region. It - * needs to be closed explicitly. Use {@link HRegion#getWAL()} to get - * access. When done with a region created using this method, you will - * need to explicitly close the {@link WAL} it created too; it will not be - * done for you. Not closing the wal will leave at least a daemon thread - * running. Call {@link #closeHRegion(HRegion)} and it will do - * necessary cleanup for you. - * @param info Info for region to create. - * @param rootDir Root directory for HBase instance - * @return new HRegion - * - * @throws IOException - */ - public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, - final Configuration conf, final HTableDescriptor hTableDescriptor) - throws IOException { - return createHRegion(info, rootDir, conf, hTableDescriptor, null); - } - - /** - * This will do the necessary cleanup a call to - * {@link #createHRegion(HRegionInfo, Path, Configuration, HTableDescriptor)} - * requires. This method will close the region and then close its - * associated {@link WAL} file. You can still use it if you call the other createHRegion, - * the one that takes an {@link WAL} instance but don't be surprised by the - * call to the {@link WAL#close()} on the {@link WAL} the - * HRegion was carrying. - * @throws IOException - */ - public static void closeHRegion(final HRegion r) throws IOException { - if (r == null) return; - r.close(); - if (r.getWAL() == null) return; - r.getWAL().close(); - } - - /** * Convenience method creating new HRegions. Used by createTable. - * The {@link WAL} for the created region needs to be closed explicitly. - * Use {@link HRegion#getWAL()} to get access. * * @param info Info for region to create. 
* @param rootDir Root directory for HBase instance * @param wal shared WAL * @param initialize - true to initialize the region * @return new HRegion - * * @throws IOException */ public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, @@ -4730,75 +4685,14 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { // final WAL wal, final boolean initialize) throws IOException { - return createHRegion(info, rootDir, conf, hTableDescriptor, - wal, initialize, false); - } - - /** - * Convenience method creating new HRegions. Used by createTable. - * The {@link WAL} for the created region needs to be closed - * explicitly, if it is not null. - * Use {@link HRegion#getWAL()} to get access. - * - * @param info Info for region to create. - * @param rootDir Root directory for HBase instance - * @param wal shared WAL - * @param initialize - true to initialize the region - * @param ignoreWAL - true to skip generate new wal if it is null, mostly for createTable - * @return new HRegion - * @throws IOException - */ - public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, - final Configuration conf, - final HTableDescriptor hTableDescriptor, - final WAL wal, - final boolean initialize, final boolean ignoreWAL) - throws IOException { - Path tableDir = FSUtils.getTableDir(rootDir, info.getTable()); - return createHRegion(info, rootDir, tableDir, conf, hTableDescriptor, wal, initialize, - ignoreWAL); - } - - /** - * Convenience method creating new HRegions. Used by createTable. - * The {@link WAL} for the created region needs to be closed - * explicitly, if it is not null. - * Use {@link HRegion#getWAL()} to get access. - * - * @param info Info for region to create. - * @param rootDir Root directory for HBase instance - * @param tableDir table directory - * @param wal shared WAL - * @param initialize - true to initialize the region - * @param ignoreWAL - true to skip generate new wal if it is null, mostly for createTable - * @return new HRegion - * @throws IOException - */ - public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, final Path tableDir, - final Configuration conf, - final HTableDescriptor hTableDescriptor, - final WAL wal, - final boolean initialize, final boolean ignoreWAL) - throws IOException { LOG.info("creating HRegion " + info.getTable().getNameAsString() + " HTD == " + hTableDescriptor + " RootDir = " + rootDir + " Table name == " + info.getTable().getNameAsString()); FileSystem fs = FileSystem.get(conf); + Path tableDir = FSUtils.getTableDir(rootDir, info.getTable()); HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, info); - WAL effectiveWAL = wal; - if (wal == null && !ignoreWAL) { - // TODO HBASE-11983 There'll be no roller for this wal? - // The WAL subsystem will use the default rootDir rather than the passed in rootDir - // unless I pass along via the conf. - Configuration confForWAL = new Configuration(conf); - confForWAL.set(HConstants.HBASE_DIR, rootDir.toString()); - effectiveWAL = (new WALFactory(confForWAL, - Collections.singletonList(new MetricsWAL()), - "hregion-" + RandomStringUtils.randomNumeric(8))). - getWAL(info.getEncodedNameAsBytes()); - } HRegion region = HRegion.newHRegion(tableDir, - effectiveWAL, fs, conf, info, hTableDescriptor, null); + wal, fs, conf, info, hTableDescriptor, null); if (initialize) { // If initializing, set the sequenceId. It is also required by WALPerformanceEvaluation when // verifying the WALEdits. 
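With the no-WAL createHRegion() overloads removed above, callers own the WAL lifecycle themselves: build a WAL through WALFactory, hand it to createHRegion(), and close both the region and the WAL when done. This is the same pattern HBaseFsck and ModifyRegionUtils switch to below. A minimal sketch of that calling pattern against the post-patch signatures follows; the class and method names in it (WalOwningCaller, createAndClose) are illustrative only and not part of this patch.

// Sketch of the caller-managed WAL lifecycle this patch introduces; not part of the patch itself.
import java.io.IOException;
import java.util.Collections;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;

public class WalOwningCaller {
  static void createAndClose(Configuration conf, Path rootDir, HRegionInfo hri,
      HTableDescriptor htd) throws IOException {
    // The WAL subsystem resolves its directories from hbase.rootdir, so point it at rootDir.
    Configuration walConf = new Configuration(conf);
    walConf.set(HConstants.HBASE_DIR, rootDir.toString());
    WAL wal = new WALFactory(walConf,
        Collections.<WALActionsListener>singletonList(new MetricsWAL()),
        "example-" + RandomStringUtils.randomNumeric(8))
        .getWAL(hri.getEncodedNameAsBytes());
    HRegion region = HRegion.createHRegion(hri, rootDir, conf, htd, wal, true);
    try {
      // ... populate or inspect the region ...
    } finally {
      // createHRegion no longer creates (or owns) a WAL for you, so close both explicitly.
      region.close();
      wal.close();
    }
  }
}

Test code gets this same pattern packaged up as HBaseTestingUtility.createWal, createRegionAndWAL, and closeRegionAndWAL later in this patch.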
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index a1c2777..441dbbf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -506,7 +506,7 @@ public class RestoreSnapshotHelper { } // create the regions on disk - ModifyRegionUtils.createRegions(exec, conf, rootDir, tableDir, + ModifyRegionUtils.createRegions(exec, conf, rootDir, tableDesc, clonedRegionsInfo, new ModifyRegionUtils.RegionFillTask() { @Override public void fillRegion(final HRegion region) throws IOException { @@ -552,7 +552,7 @@ public class RestoreSnapshotHelper { * * @param familyDir destination directory for the store file * @param regionInfo destination region info for the table - * @param hfileName store file name (can be a Reference, HFileLink or simple HFile) + * @param storeFile store file name (can be a Reference, HFileLink or simple HFile) */ private void restoreStoreFile(final Path familyDir, final HRegionInfo regionInfo, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { @@ -582,7 +582,7 @@ public class RestoreSnapshotHelper { * * @param familyDir destination directory for the store file * @param regionInfo destination region info for the table - * @param hfileName reference file name + * @param storeFile reference file name */ private void restoreReferenceFile(final Path familyDir, final HRegionInfo regionInfo, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index a5ac8b8..7841a0d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -53,6 +53,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -111,6 +112,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.Block import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; +import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL; +import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator; @@ -119,6 +122,8 @@ import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; import org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler; import org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl; import org.apache.hadoop.hbase.util.hbck.TableLockChecker; +import org.apache.hadoop.hbase.wal.WAL; +import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -1214,17 +1219,26 @@ public class HBaseFsck extends 
Configured implements Closeable {
   }
 
   /**
-   * This borrows code from MasterFileSystem.bootstrap()
+   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates its own WAL, so be
+   * sure to close it as well as the region when you're finished.
    *
    * @return an open hbase:meta HRegion
    */
   private HRegion createNewMeta() throws IOException {
-    Path rootdir = FSUtils.getRootDir(getConf());
+    Path rootdir = FSUtils.getRootDir(getConf());
     Configuration c = getConf();
     HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
     HTableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);
     MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);
-    HRegion meta = HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor);
+    // The WAL subsystem will use the default rootDir rather than the passed in rootDir
+    // unless I pass along via the conf.
+    Configuration confForWAL = new Configuration(c);
+    confForWAL.set(HConstants.HBASE_DIR, rootdir.toString());
+    WAL wal = (new WALFactory(confForWAL,
+        Collections.<WALActionsListener>singletonList(new MetricsWAL()),
+        "hbck-meta-recovery-" + RandomStringUtils.randomNumeric(8))).
+        getWAL(metaHRI.getEncodedNameAsBytes());
+    HRegion meta = HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor, wal);
     MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
     return meta;
   }
@@ -1282,8 +1296,8 @@ public class HBaseFsck extends Configured implements Closeable {
   }
 
   /**
-   * Rebuilds meta from information in hdfs/fs. Depends on configuration
-   * settings passed into hbck constructor to point to a particular fs/dir.
+   * Rebuilds meta from information in hdfs/fs. Depends on configuration settings passed into
+   * hbck constructor to point to a particular fs/dir. Assumes HBase is OFFLINE.
    *
    * @param fix flag that determines if method should attempt to fix holes
    * @return true if successful, false if attempt failed.
@@ -1339,7 +1353,10 @@ public class HBaseFsck extends Configured implements Closeable {
       return false;
     }
     meta.batchMutate(puts.toArray(new Put[puts.size()]));
-    HRegion.closeHRegion(meta);
+    meta.close();
+    if (meta.getWAL() != null) {
+      meta.getWAL().close();
+    }
     LOG.info("Success! hbase:meta table rebuilt.");
     LOG.info("Old hbase:meta is moved into " + backupDir);
     return true;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
index 0a0abda..8175454 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
@@ -213,7 +213,7 @@ public class HBaseFsckRepair {
     HRegion region = HRegion.createHRegion(hri, root, conf, htd, null);
     // Close the new region to flush to disk. Close log file too.
- HRegion.closeHRegion(region); + region.close(); return region; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java index 75884da..95d8a17 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CompletionService; @@ -32,8 +33,10 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -41,6 +44,10 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.master.AssignmentManager; +import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL; +import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; +import org.apache.hadoop.hbase.wal.WAL; +import org.apache.hadoop.hbase.wal.WALFactory; /** * Utility methods for interacting with the regions. @@ -68,53 +75,18 @@ public abstract class ModifyRegionUtils { * @param rootDir Root directory for HBase instance * @param hTableDescriptor description of the table * @param newRegions {@link HRegionInfo} that describes the regions to create - * @throws IOException - */ - public static List createRegions(final Configuration conf, final Path rootDir, - final HTableDescriptor hTableDescriptor, final HRegionInfo[] newRegions) throws IOException { - return createRegions(conf, rootDir, hTableDescriptor, newRegions, null); - } - - /** - * Create new set of regions on the specified file-system. - * NOTE: that you should add the regions to hbase:meta after this operation. - * - * @param conf {@link Configuration} - * @param rootDir Root directory for HBase instance - * @param hTableDescriptor description of the table - * @param newRegions {@link HRegionInfo} that describes the regions to create * @param task {@link RegionFillTask} custom code to populate region after creation * @throws IOException */ public static List createRegions(final Configuration conf, final Path rootDir, final HTableDescriptor hTableDescriptor, final HRegionInfo[] newRegions, final RegionFillTask task) throws IOException { - - Path tableDir = FSUtils.getTableDir(rootDir, hTableDescriptor.getTableName()); - return createRegions(conf, rootDir, tableDir, hTableDescriptor, newRegions, task); - } - - /** - * Create new set of regions on the specified file-system. - * NOTE: that you should add the regions to hbase:meta after this operation. 
- * - * @param conf {@link Configuration} - * @param rootDir Root directory for HBase instance - * @param tableDir table directory - * @param hTableDescriptor description of the table - * @param newRegions {@link HRegionInfo} that describes the regions to create - * @param task {@link RegionFillTask} custom code to populate region after creation - * @throws IOException - */ - public static List createRegions(final Configuration conf, final Path rootDir, - final Path tableDir, final HTableDescriptor hTableDescriptor, final HRegionInfo[] newRegions, - final RegionFillTask task) throws IOException { if (newRegions == null) return null; int regionNumber = newRegions.length; ThreadPoolExecutor exec = getRegionOpenAndInitThreadPool(conf, "RegionOpenAndInitThread-" + hTableDescriptor.getTableName(), regionNumber); try { - return createRegions(exec, conf, rootDir, tableDir, hTableDescriptor, newRegions, task); + return createRegions(exec, conf, rootDir, hTableDescriptor, newRegions, task); } finally { exec.shutdownNow(); } @@ -127,14 +99,13 @@ public abstract class ModifyRegionUtils { * @param exec Thread Pool Executor * @param conf {@link Configuration} * @param rootDir Root directory for HBase instance - * @param tableDir table directory * @param hTableDescriptor description of the table * @param newRegions {@link HRegionInfo} that describes the regions to create * @param task {@link RegionFillTask} custom code to populate region after creation * @throws IOException */ public static List createRegions(final ThreadPoolExecutor exec, - final Configuration conf, final Path rootDir, final Path tableDir, + final Configuration conf, final Path rootDir, final HTableDescriptor hTableDescriptor, final HRegionInfo[] newRegions, final RegionFillTask task) throws IOException { if (newRegions == null) return null; @@ -146,7 +117,7 @@ public abstract class ModifyRegionUtils { completionService.submit(new Callable() { @Override public HRegionInfo call() throws IOException { - return createRegion(conf, rootDir, tableDir, hTableDescriptor, newRegion, task); + return createRegion(conf, rootDir, hTableDescriptor, newRegion, task); } }); } @@ -168,19 +139,24 @@ public abstract class ModifyRegionUtils { * Create new set of regions on the specified file-system. * @param conf {@link Configuration} * @param rootDir Root directory for HBase instance - * @param tableDir table directory * @param hTableDescriptor description of the table * @param newRegion {@link HRegionInfo} that describes the region to create * @param task {@link RegionFillTask} custom code to populate region after creation * @throws IOException */ public static HRegionInfo createRegion(final Configuration conf, final Path rootDir, - final Path tableDir, final HTableDescriptor hTableDescriptor, final HRegionInfo newRegion, + final HTableDescriptor hTableDescriptor, final HRegionInfo newRegion, final RegionFillTask task) throws IOException { // 1. Create HRegion - HRegion region = HRegion.createHRegion(newRegion, - rootDir, tableDir, conf, hTableDescriptor, null, - false, true); + // The WAL subsystem will use the default rootDir rather than the passed in rootDir + // unless I pass along via the conf. + Configuration confForWAL = new Configuration(conf); + confForWAL.set(HConstants.HBASE_DIR, rootDir.toString()); + WAL wal = (new WALFactory(confForWAL, + Collections.singletonList(new MetricsWAL()), + "hregion-" + RandomStringUtils.randomNumeric(8))). 
+ getWAL(newRegion.getEncodedNameAsBytes()); + HRegion region = HRegion.createHRegion(newRegion, rootDir, conf, hTableDescriptor, wal, false); try { // 2. Custom user code to interact with the created region if (task != null) { @@ -189,6 +165,7 @@ public abstract class ModifyRegionUtils { } finally { // 3. Close the new region to flush to disk. Close log file too. region.close(); + wal.close(); } return region.getRegionInfo(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java index ba349e5..5dc1dde 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java @@ -62,7 +62,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; * server. * * - * Alternatively, you may provide a custome implementation of {@link WALProvider} by class name. + * Alternatively, you may provide a custom implementation of {@link WALProvider} by class name. */ @InterfaceAudience.Private public class WALFactory { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 3705f3b..e4dc09e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -149,9 +149,8 @@ public abstract class HBaseTestCase extends TestCase { } /** - * You must call close on the returned region and then close on the log file - * it created. Do {@link HRegion#close()} followed by {@link HRegion#getWAL()} - * and on it call close. + * You must call close on the returned region and then close on the log file it created. Do + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to close both the region and the WAL. 
* @param desc * @param startKey * @param endKey @@ -168,7 +167,7 @@ public abstract class HBaseTestCase extends TestCase { byte [] endKey, Configuration conf) throws IOException { HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey); - return HRegion.createHRegion(hri, testDir, conf, desc); + return HBaseTestingUtility.createRegionAndWAL(hri, testDir, conf, desc); } protected HRegion openClosedRegion(final HRegion closedRegion) @@ -641,12 +640,12 @@ public abstract class HBaseTestCase extends TestCase { */ protected void createMetaRegion() throws IOException { FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(conf); - meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir, - conf, fsTableDescriptors.get(TableName.META_TABLE_NAME) ); + meta = HBaseTestingUtility.createRegionAndWAL(HRegionInfo.FIRST_META_REGIONINFO, testDir, + conf, fsTableDescriptors.get(TableName.META_TABLE_NAME)); } protected void closeRootAndMeta() throws IOException { - HRegion.closeHRegion(meta); + HBaseTestingUtility.closeRegionAndWAL(meta); } public static void assertByteEquals(byte[] expected, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 40b1364..a77cea8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase; +import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Jdk14Logger; @@ -62,6 +63,8 @@ import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; +import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL; +import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.tool.Canary; import org.apache.hadoop.hbase.util.Bytes; @@ -75,6 +78,7 @@ import org.apache.hadoop.hbase.util.RegionSplitter; import org.apache.hadoop.hbase.util.RetryCounter; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.wal.WAL; +import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.hbase.zookeeper.EmptyWatcher; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.ZKConfig; @@ -124,7 +128,7 @@ import static org.junit.Assert.fail; * Create an instance and keep it around testing HBase. This class is * meant to be your one-stop shop for anything you might need testing. Manages * one cluster at a time only. Managed cluster can be an in-process - * {@link MiniHBaseCluster}, or a deployed cluster of type {@link DistributedHBaseCluster}. + * {@link MiniHBaseCluster}, or a deployed cluster of type {@code DistributedHBaseCluster}. * Not all methods work with the real cluster. * Depends on log4j being on classpath and * hbase-site.xml for logging and test-run configuration. It does not set @@ -277,6 +281,16 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } /** + * Close both the HRegion {@code r} and it's underlying WAL. For use in tests. 
+ */ + public static void closeRegionAndWAL(final HRegion r) throws IOException { + if (r == null) return; + r.close(); + if (r.getWAL() == null) return; + r.getWAL().close(); + } + + /** * Returns this classes's instance of {@link Configuration}. Be careful how * you use the returned Configuration since {@link HConnection} instances * can be shared. The Map of HConnections is keyed by the Configuration. If @@ -1692,13 +1706,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { public static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR}; public static final String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET); - /** - * Create a table of name name with {@link COLUMNS} for - * families. - * @param name Name to give table. - * @param versions How many versions to allow per column. - * @return Column descriptor. - */ public HTableDescriptor createTableDescriptor(final String name, final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name)); @@ -1715,8 +1722,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } /** - * Create a table of name name with {@link COLUMNS} for - * families. + * Create a table of name name. * @param name Name to give table. * @return Column descriptor. */ @@ -1741,14 +1747,11 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } /** - * Create an HRegion that writes to the local tmp dirs - * @param info - * @param desc - * @return - * @throws IOException + * Create an HRegion that writes to the local tmp dirs. Creates the WAL for you. Be sure to call + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when you're finished with it. */ public HRegion createLocalHRegion(HRegionInfo info, HTableDescriptor desc) throws IOException { - return HRegion.createHRegion(info, getDataTestDir(), getConfiguration(), desc); + return createRegionAndWAL(info, getDataTestDir(), getConfiguration(), desc); } /** @@ -1774,7 +1777,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @param families * @throws IOException * @return A region on which you must call - * {@link HRegion#closeHRegion(HRegion)} when done. + {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done. */ public HRegion createLocalHRegion(byte[] tableName, byte[] startKey, byte[] stopKey, String callingMethod, Configuration conf, boolean isReadOnly, Durability durability, @@ -2292,6 +2295,41 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { } /** + * Create an unmanaged WAL. Be sure to close it when you're through. + */ + public static WAL createWal(final Configuration conf, final Path rootDir, final HRegionInfo hri) + throws IOException { + // The WAL subsystem will use the default rootDir rather than the passed in rootDir + // unless I pass along via the conf. + Configuration confForWAL = new Configuration(conf); + confForWAL.set(HConstants.HBASE_DIR, rootDir.toString()); + return (new WALFactory(confForWAL, + Collections.singletonList(new MetricsWAL()), + "hregion-" + RandomStringUtils.randomNumeric(8))). + getWAL(hri.getEncodedNameAsBytes()); + } + + /** + * Create a region with it's own WAL. Be sure to call + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all resources. 
+ */ + public static HRegion createRegionAndWAL(final HRegionInfo info, final Path rootDir, + final Configuration conf, final HTableDescriptor htd) throws IOException { + return createRegionAndWAL(info, rootDir, conf, htd, true); + } + + /** + * Create a region with it's own WAL. Be sure to call + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to clean up all resources. + */ + public static HRegion createRegionAndWAL(final HRegionInfo info, final Path rootDir, + final Configuration conf, final HTableDescriptor htd, boolean initialize) + throws IOException { + WAL wal = createWal(conf, rootDir, info); + return HRegion.createHRegion(info, rootDir, conf, htd, wal, initialize); + } + + /** * Returns all rows from the hbase:meta table. * * @throws IOException When reading the rows fails. @@ -2888,7 +2926,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Waits for a table to be 'enabled'. Enabled means that table is set as 'enabled' and the * regions have been all assigned. Will timeout after default period (30 seconds) - * @see #waitTableAvailable(byte[]) * @param table Table to wait on. * @param table * @throws InterruptedException @@ -2907,7 +2944,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Waits for a table to be 'enabled'. Enabled means that table is set as 'enabled' and the * regions have been all assigned. - * @see #waitTableAvailable(byte[]) + * @see #waitTableEnabled(Admin, byte[], long) * @param table Table to wait on. * @param timeoutMillis Time to wait on it being marked enabled. * @throws InterruptedException @@ -2960,7 +2997,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Waits for a table to be 'disabled'. Disabled means that table is set as 'disabled' - * @see #waitTableAvailable(byte[]) * @param table Table to wait on. * @param timeoutMillis Time to wait on it being marked disabled. 
* @throws InterruptedException @@ -3619,9 +3655,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { htd.addFamily(hcd); HRegionInfo info = new HRegionInfo(TableName.valueOf(tableName), null, null, false); - HRegion region = - HRegion.createHRegion(info, getDataTestDir(), getConfiguration(), htd); - return region; + return createRegionAndWAL(info, getDataTestDir(), getConfiguration(), htd); } public void setFileSystemURI(String fsURI) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java index add8221..ff142d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.wal.WAL; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -60,8 +61,8 @@ public class TestIntraRowPagination { HColumnDescriptor hcd = new HColumnDescriptor(family); htd.addFamily(hcd); } - HRegion region = - HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); try { Put put; Scan scan; @@ -101,7 +102,7 @@ public class TestIntraRowPagination { TestScannersFromClientSide.verifyResult(result, kvListExp, toLog, "Testing scan with storeOffset and storeLimit"); } finally { - region.close(); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java index c3791c3..3e0a624 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java @@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.PairOfSameType; +import org.apache.hadoop.hbase.wal.WAL; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -350,6 +351,7 @@ public class TestCoprocessorInterface { // hence the old entry was indeed removed by the GC and new one has been created Object o3 = ((CoprocessorII)c2).getSharedData().get("test2"); assertFalse(o3 == o2); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -374,7 +376,7 @@ public class TestCoprocessorInterface { for (int i = 0; i < regions.length; i++) { regions[i] = reopenRegion(regions[i], CoprocessorImpl.class); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); Coprocessor c = region.getCoprocessorHost(). 
findCoprocessor(CoprocessorImpl.class.getName()); @@ -394,7 +396,7 @@ public class TestCoprocessorInterface { assertTrue(((CoprocessorImpl)c).wasSplit()); for (int i = 0; i < regions.length; i++) { - HRegion.closeHRegion(regions[i]); + HBaseTestingUtility.closeRegionAndWAL(regions[i]); c = region.getCoprocessorHost() .findCoprocessor(CoprocessorImpl.class.getName()); assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted()); @@ -441,7 +443,7 @@ public class TestCoprocessorInterface { } HRegionInfo info = new HRegionInfo(tableName, null, null, false); Path path = new Path(DIR + callingMethod); - HRegion r = HRegion.createHRegion(info, path, conf, htd); + HRegion r = HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd); // this following piece is a hack. RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java index 88b001d..4c4d6e5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -151,7 +150,8 @@ public class TestRegionObserverScannerOpenHook { } HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); Path path = new Path(DIR + callingMethod); - HRegion r = HRegion.createHRegion(info, path, conf, htd); + WAL wal = HBaseTestingUtility.createWal(conf, path, info); + HRegion r = HRegion.createHRegion(info, path, conf, htd, wal); // this following piece is a hack. currently a coprocessorHost // is secretly loaded at OpenRegionHandler. we don't really // start a region server here, so just manually create cphost @@ -183,6 +183,7 @@ public class TestRegionObserverScannerOpenHook { assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " + r, r.listCells()); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -208,6 +209,7 @@ public class TestRegionObserverScannerOpenHook { assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. 
Found: " + r, r.listCells()); + HBaseTestingUtility.closeRegionAndWAL(region); } /* diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java index 126a2d2..eeaaa94 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WAL; import org.junit.experimental.categories.Category; @Category({CoprocessorTests.class, SmallTests.class}) @@ -102,7 +103,7 @@ public class TestRegionObserverStacking extends TestCase { } HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); Path path = new Path(DIR + callingMethod); - HRegion r = HRegion.createHRegion(info, path, conf, htd); + HRegion r = HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd); // this following piece is a hack. currently a coprocessorHost // is secretly loaded at OpenRegionHandler. we don't really // start a region server here, so just manually create cphost @@ -139,6 +140,7 @@ public class TestRegionObserverStacking extends TestCase { assertTrue(idA < idB); assertTrue(idB < idC); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java index 0fbad42..a164560 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WAL; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -51,8 +52,8 @@ public class TestColumnPrefixFilter { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter")); htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion region = HRegion.createHRegion(info, TEST_UTIL. 
- getDataTestDir(), TEST_UTIL.getConfiguration(), htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); try { List rows = generateRandomWords(100, "row"); List columns = generateRandomWords(10000, "column"); @@ -101,10 +102,10 @@ public class TestColumnPrefixFilter { assertEquals(prefixMap.get(s).size(), results.size()); } } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -113,8 +114,8 @@ public class TestColumnPrefixFilter { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter")); htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion region = HRegion.createHRegion(info, TEST_UTIL. - getDataTestDir(), TEST_UTIL.getConfiguration(), htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); try { List rows = generateRandomWords(100, "row"); List columns = generateRandomWords(10000, "column"); @@ -166,10 +167,10 @@ public class TestColumnPrefixFilter { assertEquals(prefixMap.get(s).size(), results.size()); } } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } List generateRandomWords(int numberOfWords, String suffix) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index 06e0260..ea5c504 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WAL; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -81,14 +82,14 @@ public class TestDependentColumnFilter { hcd1.setMaxVersions(3); htd.addFamily(hcd1); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), - TEST_UTIL.getConfiguration(), htd); + this.region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); addData(); } @After public void tearDown() throws Exception { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); } private void addData() throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index 3396587..61321dd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -142,8 +142,8 @@ public class TestFilter { htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[0])); htd.addFamily(new HColumnDescriptor(NEW_FAMILIES[1])); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - this.region = HRegion.createHRegion(info, 
TEST_UTIL.getDataTestDir(), - TEST_UTIL.getConfiguration(), htd); + this.region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); // Insert first half for(byte [] ROW : ROWS_ONE) { @@ -217,9 +217,7 @@ public class TestFilter { @After public void tearDown() throws Exception { - WAL wal = region.getWAL(); - region.close(); - wal.close(); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -656,8 +654,7 @@ public class TestFilter { /** * Tests the the {@link WhileMatchFilter} works in combination with a - * {@link Filter} that uses the - * {@link Filter#filterKeyValue(org.apache.hadoop.hbase.KeyValue)} method. + * {@link Filter} that uses the {@link Filter#filterKeyValue(Cell)} method. * * See HBASE-2258. * @@ -1453,7 +1450,7 @@ public class TestFilter { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestFilter")); htd.addFamily(new HColumnDescriptor(family)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion testRegion = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), + HRegion testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd); for(int i=0; i<5; i++) { @@ -2007,13 +2004,14 @@ public class TestFilter { } } + // TODO: intentionally disabled? public void testNestedFilterListWithSCVF() throws IOException { byte[] columnStatus = Bytes.toBytes("S"); HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testNestedFilterListWithSCVF")); htd.addFamily(new HColumnDescriptor(FAMILIES[0])); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion testRegion = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), - TEST_UTIL.getConfiguration(), htd); + HRegion testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), + TEST_UTIL.getConfiguration(), htd); for(int i=0; i<10; i++) { Put p = new Put(Bytes.toBytes("row" + i)); p.setDurability(Durability.SKIP_WAL); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java index 70ef51f..5454480d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java @@ -68,7 +68,7 @@ public class TestInvocationRecordFilter { TableName.valueOf(TABLE_NAME_BYTES)); htd.addFamily(new HColumnDescriptor(FAMILY_NAME_BYTES)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - this.region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), + this.region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd); Put put = new Put(ROW_BYTES); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java index 0db5ecf..d2997af 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java @@ -54,8 +54,8 @@ public class TestMultipleColumnPrefixFilter { htd.addFamily(hcd); // HRegionInfo info = new HRegionInfo(htd, null, null, false); HRegionInfo info = new HRegionInfo(htd.getTableName(), 
null, null, false); - HRegion region = HRegion.createHRegion(info, TEST_UTIL. - getDataTestDir(), TEST_UTIL.getConfiguration(), htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL. + getDataTestDir(), TEST_UTIL.getConfiguration(), htd); List rows = generateRandomWords(100, "row"); List columns = generateRandomWords(10000, "column"); @@ -105,7 +105,7 @@ public class TestMultipleColumnPrefixFilter { while(scanner.next(results)); assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -120,8 +120,8 @@ public class TestMultipleColumnPrefixFilter { hcd2.setMaxVersions(3); htd.addFamily(hcd2); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion region = HRegion.createHRegion(info, TEST_UTIL. - getDataTestDir(), TEST_UTIL.getConfiguration(), htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL. + getDataTestDir(), TEST_UTIL.getConfiguration(), htd); List rows = generateRandomWords(100, "row"); List columns = generateRandomWords(10000, "column"); @@ -177,7 +177,7 @@ public class TestMultipleColumnPrefixFilter { while(scanner.next(results)); assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test @@ -186,8 +186,8 @@ public class TestMultipleColumnPrefixFilter { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter")); htd.addFamily(new HColumnDescriptor(family)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); - HRegion region = HRegion.createHRegion(info, TEST_UTIL. - getDataTestDir(), TEST_UTIL.getConfiguration(),htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL. 
+ getDataTestDir(), TEST_UTIL.getConfiguration(), htd); List rows = generateRandomWords(100, "row"); List columns = generateRandomWords(10000, "column"); @@ -232,7 +232,7 @@ public class TestMultipleColumnPrefixFilter { assertEquals(results1.size(), results2.size()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } List generateRandomWords(int numberOfWords, String suffix) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java index 9b87c37..80a50b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java @@ -74,12 +74,12 @@ public class TestPrefixTree { htd.addFamily(new HColumnDescriptor(fam).setDataBlockEncoding(DataBlockEncoding.PREFIX_TREE)); HRegionInfo info = new HRegionInfo(tableName, null, null, false); Path path = testUtil.getDataTestDir(getClass().getSimpleName()); - region = HRegion.createHRegion(info, path, testUtil.getConfiguration(), htd); + region = HBaseTestingUtility.createRegionAndWAL(info, path, testUtil.getConfiguration(), htd); } @After public void tearDown() throws Exception { - region.close(true); + HBaseTestingUtility.closeRegionAndWAL(region); testUtil.cleanupTestDir(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java index 55aa97b..e8f6c1b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java @@ -99,7 +99,8 @@ public class TestScannerSelectionUsingKeyRange { HTableDescriptor htd = new HTableDescriptor(TABLE); htd.addFamily(hcd); HRegionInfo info = new HRegionInfo(TABLE); - HRegion region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(), conf, htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), conf, + htd); for (int iFile = 0; iFile < NUM_FILES; ++iFile) { for (int iRow = 0; iRow < NUM_ROWS; ++iRow) { @@ -126,6 +127,6 @@ public class TestScannerSelectionUsingKeyRange { assertEquals(0, results.size()); Set accessedFiles = cache.getCachedFileNamesForTest(); assertEquals(expectedCount, accessedFiles.size()); - region.close(); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java index c1a5061..1c426e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java @@ -107,7 +107,8 @@ public class TestScannerSelectionUsingTTL { htd.addFamily(hcd); HRegionInfo info = new HRegionInfo(TABLE); HRegion region = - HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(info.getEncodedName()), + HBaseTestingUtility.createRegionAndWAL(info, + TEST_UTIL.getDataTestDir(info.getEncodedName()), conf, htd); long ts = EnvironmentEdgeManager.currentTime(); @@ -157,6 +158,6 @@ public class TestScannerSelectionUsingTTL { region.compactStores(); } - region.close(); + 
HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java index f211754..64046a6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java @@ -64,13 +64,13 @@ public class TestMasterFailover { HRegion createRegion(final HRegionInfo hri, final Path rootdir, final Configuration c, final HTableDescriptor htd) throws IOException { - HRegion r = HRegion.createHRegion(hri, rootdir, c, htd); + HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, rootdir, c, htd); // The above call to create a region will create an wal file. Each // log file create will also create a running thread to do syncing. We need // to close out this log else we will have a running thread trying to sync // the file system continuously which is ugly when dfs is taken away at the // end of the test. - HRegion.closeHRegion(r); + HBaseTestingUtility.closeRegionAndWAL(r); return r; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java index 883e530..192c989 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java @@ -538,8 +538,9 @@ public class TestAtomicOperation { final String tableName = "testPutAndCheckAndPut"; Configuration conf = TEST_UTIL.getConfiguration(); conf.setClass(HConstants.REGION_IMPL, MockHRegion.class, HeapSize.class); - final MockHRegion region = (MockHRegion) TEST_UTIL.createLocalHRegion(Bytes.toBytes(tableName), - null, null, tableName, conf, false, Durability.SYNC_WAL, null, Bytes.toBytes(family)); + HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)) + .addFamily(new HColumnDescriptor(family)); + final MockHRegion region = (MockHRegion) TEST_UTIL.createLocalHRegion(htd, null, null); Put[] puts = new Put[1]; Put put = new Put(Bytes.toBytes("r1")); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java index 2bb8076..1ae17ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java @@ -90,7 +90,7 @@ public class TestBlocksRead extends HBaseTestCase { } /** - * Callers must afterward call {@link HRegion#closeHRegion(HRegion)} + * Callers must afterward call {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} * @param tableName * @param callingMethod * @param conf @@ -112,7 +112,7 @@ public class TestBlocksRead extends HBaseTestCase { HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); Path path = new Path(DIR + callingMethod); - HRegion r = HRegion.createHRegion(info, path, conf, htd); + HRegion r = HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd); blockCache = new CacheConfig(conf).getBlockCache(); return r; } @@ -265,7 +265,7 @@ public class TestBlocksRead extends HBaseTestCase { assertEquals(1, kvs.length); verifyData(kvs[0], "row", "col5", 5); } finally { - HRegion.closeHRegion(this.region); + 
HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -374,7 +374,7 @@ public class TestBlocksRead extends HBaseTestCase { verifyData(kvs[1], "row", "col2", 12); verifyData(kvs[2], "row", "col3", 13); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -423,7 +423,7 @@ public class TestBlocksRead extends HBaseTestCase { assertEquals(2 * BLOOM_TYPE.length, blocksEnd - blocksStart); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -450,7 +450,7 @@ public class TestBlocksRead extends HBaseTestCase { assertEquals(1, kvs.length); verifyData(kvs[0], "row", "col99", 201); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java index 81ff370..7632a41 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java @@ -158,10 +158,10 @@ public class TestColumnSeeking { assertTrue(KeyValueTestUtil.containsIgnoreMvccVersion(results, kvSet)); } } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } @SuppressWarnings("unchecked") @@ -270,7 +270,7 @@ public class TestColumnSeeking { assertTrue(KeyValueTestUtil.containsIgnoreMvccVersion(results, kvSet)); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } List generateRandomWords(int numberOfWords, String suffix) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java index 43bc9f1..4969a59 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java @@ -97,8 +97,8 @@ public class TestDefaultCompactSelection extends TestCase { final Configuration walConf = new Configuration(conf); FSUtils.setRootDir(walConf, basedir); wals = new WALFactory(walConf, null, id); - region = HRegion.createHRegion(info, basedir, conf, htd); - HRegion.closeHRegion(region); + region = HBaseTestingUtility.createRegionAndWAL(info, basedir, conf, htd); + HBaseTestingUtility.closeRegionAndWAL(region); Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName()); region = new HRegion(tableDir, wals.getWAL(info.getEncodedNameAsBytes()), fs, conf, info, htd, null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java index 92351f4..0d7820f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java @@ -29,6 +29,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestCase; 
+import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; @@ -73,8 +74,8 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase { // Up flush size else we bind up when we use default catalog flush of 16k. fsTableDescriptors.get(TableName.META_TABLE_NAME).setMemStoreFlushSize(64 * 1024 * 1024); - HRegion mr = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, - rootdir, this.conf, fsTableDescriptors.get(TableName.META_TABLE_NAME)); + HRegion mr = HBaseTestingUtility.createRegionAndWAL(HRegionInfo.FIRST_META_REGIONINFO, + rootdir, this.conf, fsTableDescriptors.get(TableName.META_TABLE_NAME)); try { // Write rows for three tables 'A', 'B', and 'C'. for (char c = 'A'; c < 'D'; c++) { @@ -136,14 +137,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase { findRow(mr, 'C', 46, -1); findRow(mr, 'C', 43, -1); } finally { - if (mr != null) { - try { - mr.close(); - } catch (Exception e) { - e.printStackTrace(); - } - mr.getWAL().close(); - } + HBaseTestingUtility.closeRegionAndWAL(mr); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 992a978..760942a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -50,6 +50,7 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; @@ -60,6 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; +import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -127,8 +129,10 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescripto import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl; import org.apache.hadoop.hbase.regionserver.HRegion.RowLock; import org.apache.hadoop.hbase.regionserver.TestStore.FaultyFileSystem; +import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL; import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; +import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALUtil; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.wal.DefaultWALProvider; @@ -253,7 +257,7 @@ public class TestHRegion { // Close with something in memstore and something in the snapshot. Make sure all is cleared. 
region.close(); assertEquals(0, region.getMemstoreSize().get()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /* @@ -305,7 +309,24 @@ public class TestHRegion { } long sz = store.getFlushableSize(); assertTrue("flushable size should be zero, but it is " + sz, sz == 0); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); + } + + /** + * Create a WAL outside of the usual helper in + * {@link HBaseTestingUtility#createWal(Configuration, Path, HRegionInfo)} because that method + * doesn't play nicely with FaultyFileSystem. Call this method before overriding + * {@code fs.file.impl}. + * @param callingMethod a unique component for the path, probably the name of the test method. + */ + private static WAL createWALCompatibleWithFaultyFileSystem(String callingMethod, + Configuration conf, byte[] tableName) throws IOException { + final Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log"); + final Configuration walConf = new Configuration(conf); + FSUtils.setRootDir(walConf, logDir); + return (new WALFactory(walConf, + Collections.singletonList(new MetricsWAL()), callingMethod)) + .getWAL(tableName); } /** @@ -326,6 +347,8 @@ public class TestHRegion { @Test (timeout=60000) public void testFlushSizeAccounting() throws Exception { final Configuration conf = HBaseConfiguration.create(CONF); + final String callingMethod = name.getMethodName(); + final WAL wal = createWALCompatibleWithFaultyFileSystem(callingMethod, conf, tableName); // Only retry once. conf.setInt("hbase.hstore.flush.retries.number", 1); final User user = @@ -342,7 +365,8 @@ public class TestHRegion { HRegion region = null; try { // Initialize region - region = initHRegion(tableName, name.getMethodName(), conf, COLUMN_FAMILY_BYTES); + region = initHRegion(tableName, null, null, callingMethod, conf, false, + Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES); long size = region.getMemstoreSize().get(); Assert.assertEquals(0, size); // Put one item into memstore. Measure the size of one item in memstore. @@ -376,7 +400,7 @@ public class TestHRegion { // Make sure our memory accounting is right. Assert.assertEquals(sizeOfOnePut * 2, region.getMemstoreSize().get()); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } return null; } @@ -387,6 +411,8 @@ public class TestHRegion { @Test (timeout=60000) public void testCloseWithFailingFlush() throws Exception { final Configuration conf = HBaseConfiguration.create(CONF); + final String callingMethod = name.getMethodName(); + final WAL wal = createWALCompatibleWithFaultyFileSystem(callingMethod, conf, tableName); // Only retry once. conf.setInt("hbase.hstore.flush.retries.number", 1); final User user = @@ -403,7 +429,8 @@ public class TestHRegion { HRegion region = null; try { // Initialize region - region = initHRegion(tableName, name.getMethodName(), conf, COLUMN_FAMILY_BYTES); + region = initHRegion(tableName, null, null, callingMethod, conf, false, + Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES); long size = region.getMemstoreSize().get(); Assert.assertEquals(0, size); // Put one item into memstore. Measure the size of one item in memstore. 
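Editor's sketch (not part of the patch): the hunks above rework testFlushSizeAccounting and testCloseWithFailingFlush so that the WAL is built up front by the new createWALCompatibleWithFaultyFileSystem helper and passed to the long-form initHRegion overload, instead of letting the region create its own. The shape of such a test, assuming only the signatures and class members (CONF, name, tableName, COLUMN_FAMILY_BYTES) visible in the hunks; the method name, put and assertion below are illustrative and are not taken from the patch.

  @Test (timeout=60000)
  public void flushSizeAccountingSketch() throws Exception {
    final Configuration conf = HBaseConfiguration.create(CONF);
    final String callingMethod = name.getMethodName();
    // Build the WAL first, on the regular filesystem, exactly as the helper above does.
    final WAL wal = createWALCompatibleWithFaultyFileSystem(callingMethod, conf, tableName);
    HRegion region = null;
    try {
      // The long-form overload accepts the caller-supplied WAL instead of creating one.
      region = initHRegion(tableName, null, null, callingMethod, conf, false,
          Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES);
      region.put(new Put(Bytes.toBytes("r")).add(COLUMN_FAMILY_BYTES, Bytes.toBytes("q"),
          Bytes.toBytes("v")));
      Assert.assertTrue(region.getMemstoreSize().get() > 0);
    } finally {
      // Closes the region and the WAL it was handed; replaces HRegion.closeHRegion().
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }

The ordering matters because, per the helper's javadoc, the WAL has to exist before the test overrides fs.file.impl with FaultyFileSystem, so only the store writes exercise the faulty filesystem.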
@@ -428,7 +455,7 @@ public class TestHRegion { } finally { // Make it so all writes succeed from here on out so can close clean ffs.fault.set(false); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } return null; } @@ -561,7 +588,7 @@ public class TestHRegion { assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; wals.close(); } @@ -619,7 +646,7 @@ public class TestHRegion { } } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; wals.close(); } @@ -652,7 +679,7 @@ public class TestHRegion { long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, null); assertEquals(minSeqId, seqId); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -717,7 +744,7 @@ public class TestHRegion { region.getStores().keySet().toArray(new byte[0][])).size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; wals.close(); } @@ -812,7 +839,7 @@ public class TestHRegion { assertArrayEquals(Bytes.toBytes(i), value); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; wals.close(); } @@ -931,7 +958,7 @@ public class TestHRegion { assertArrayEquals(Bytes.toBytes(i), value); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; wals.close(); } @@ -1051,7 +1078,7 @@ public class TestHRegion { } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1092,7 +1119,7 @@ public class TestHRegion { } } finally { if (this.region != null) { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); } } done.set(true); @@ -1108,7 +1135,7 @@ public class TestHRegion { } } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1185,7 +1212,7 @@ public class TestHRegion { assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3, 0, getNumberOfRows(keyPrefix3, value2, this.region)); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1204,7 +1231,7 @@ public class TestHRegion { } catch (IOException e) { exceptionCaught = true; } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } assertTrue(exceptionCaught == true); @@ -1223,7 +1250,7 @@ public class TestHRegion { } catch (IOException e) { exceptionCaught = true; } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } assertTrue(exceptionCaught == true); @@ -1319,7 +1346,7 @@ public class TestHRegion { } assertTrue(exception); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1360,7 +1387,7 @@ public class TestHRegion { metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 2, source); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1412,7 +1439,7 
@@ public class TestHRegion { @Override public void run() { try { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } catch (IOException e) { throw new RuntimeException(e); } @@ -1448,7 +1475,7 @@ public class TestHRegion { codes[i].getOperationStatusCode()); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1496,7 +1523,7 @@ public class TestHRegion { metricsAssertHelper.assertCounter("syncTimeNumOps", syncs, source); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } @@ -1575,7 +1602,7 @@ public class TestHRegion { .checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new NullComparator(), put, true); assertTrue(res); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1609,7 +1636,7 @@ public class TestHRegion { put, true); assertEquals(false, res); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1642,7 +1669,7 @@ public class TestHRegion { delete, true); assertEquals(true, res); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1737,7 +1764,7 @@ public class TestHRegion { new BinaryComparator(val3), put, true); assertEquals(true, res); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1784,7 +1811,7 @@ public class TestHRegion { assertEquals(expected[i], actual[i]); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1804,7 +1831,7 @@ public class TestHRegion { // expected exception. 
} } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1881,7 +1908,7 @@ public class TestHRegion { r = region.get(get); assertEquals(0, r.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1916,7 +1943,7 @@ public class TestHRegion { Result r = region.get(get); assertEquals(0, r.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -1960,7 +1987,7 @@ public class TestHRegion { } assertEquals("Family " + new String(family) + " does exist", true, ok); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2028,7 +2055,7 @@ public class TestHRegion { result = region.get(get); assertEquals(1, result.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2067,7 +2094,7 @@ public class TestHRegion { result = region.get(get); assertEquals(0, result.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2117,7 +2144,7 @@ public class TestHRegion { assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp", kv.getTimestamp() != HConstants.LATEST_TIMESTAMP); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } @@ -2152,7 +2179,7 @@ public class TestHRegion { } assertTrue("Should catch FailedSanityCheckException", caughtExcep); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2194,7 +2221,7 @@ public class TestHRegion { s.next(results); assertTrue(CellUtil.matchingRow(results.get(0), rowB)); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2253,7 +2280,7 @@ public class TestHRegion { assertArrayEquals(qual1, CellUtil.cloneQualifier(kv)); assertArrayEquals(row, CellUtil.cloneRow(kv)); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2293,7 +2320,7 @@ public class TestHRegion { now = cell.getTimestamp(); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2324,7 +2351,7 @@ public class TestHRegion { } assertFalse(true); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2376,7 +2403,7 @@ public class TestHRegion { res = region.get(g); assertEquals(count, res.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2395,7 +2422,7 @@ public class TestHRegion { assertTrue(r.isEmpty()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2438,14 +2465,14 @@ public class TestHRegion { } finally { for (int i = 0; i < subregions.length; i++) { try { - HRegion.closeHRegion(subregions[i]); + HBaseTestingUtility.closeRegionAndWAL(subregions[i]); } catch (IOException e) { // Ignore. 
} } } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2511,7 +2538,7 @@ public class TestHRegion { assertTrue("Families could not be found in Region", false); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2537,7 +2564,7 @@ public class TestHRegion { } assertTrue("Families could not be found in Region", ok); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2581,7 +2608,7 @@ public class TestHRegion { is = (RegionScannerImpl) region.getScanner(scan); assertEquals(families.length - 1, ((RegionScannerImpl) is).storeHeap.getHeap().size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2618,7 +2645,7 @@ public class TestHRegion { + e.getMessage()); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2684,7 +2711,7 @@ public class TestHRegion { assertTrue(CellComparator.equalsIgnoreMvccVersion(expected2.get(i), res.get(i))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2743,7 +2770,7 @@ public class TestHRegion { assertEquals(expected.get(i), actual.get(i)); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2806,7 +2833,7 @@ public class TestHRegion { assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2888,7 +2915,7 @@ public class TestHRegion { assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -2949,7 +2976,7 @@ public class TestHRegion { assertEquals(expected.get(i), actual.get(i)); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3010,7 +3037,7 @@ public class TestHRegion { assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3058,7 +3085,7 @@ public class TestHRegion { assertEquals(false, s.next(results)); assertEquals(0, results.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3137,7 +3164,7 @@ public class TestHRegion { assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i))); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3206,7 +3233,7 @@ public class TestHRegion { assertFalse(s.next(results)); assertEquals(results.size(), 0); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3301,7 +3328,7 @@ public class TestHRegion { break; } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3395,7 
+3422,7 @@ public class TestHRegion { } } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3434,7 +3461,7 @@ public class TestHRegion { verifyData(regions[1], splitRow, numRows, qualifier, families); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3502,7 +3529,7 @@ public class TestHRegion { verifyData(regions[1], splitRow, numRows, qualifier, families); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3574,7 +3601,7 @@ public class TestHRegion { flushThread.join(); flushThread.checkNoError(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3708,7 +3735,7 @@ public class TestHRegion { flushThread.checkNoError(); } finally { try { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); } catch (DroppedSnapshotException dse) { // We could get this on way out because we interrupt the background flusher and it could // fail anywhere causing a DSE over in the background flusher... only it is not properly @@ -3735,11 +3762,11 @@ public class TestHRegion { } /** - * Block until this thread has put at least one row. + * Block calling thread until this instance of PutThread has put at least one row. */ public void waitForFirstPut() throws InterruptedException { // wait until put thread actually puts some data - while (numPutsFinished == 0) { + while (isAlive() && numPutsFinished == 0) { checkNoError(); Thread.sleep(50); } @@ -3909,7 +3936,7 @@ public class TestHRegion { } ctx.stop(); - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3932,7 +3959,7 @@ public class TestHRegion { g = new Get(row); region.get(g); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -3971,7 +3998,7 @@ public class TestHRegion { ; assertEquals(1L, res.size()); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -4032,7 +4059,7 @@ public class TestHRegion { assertEquals(num_unique_rows, reader.getFilterEntries()); } } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -4075,7 +4102,7 @@ public class TestHRegion { checkOneCell(kvs[2], FAMILY, 0, 0, 2); checkOneCell(kvs[3], FAMILY, 0, 0, 1); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -4118,7 +4145,7 @@ public class TestHRegion { Cell[] keyValues = region.get(get).rawCells(); assertTrue(keyValues.length == 0); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -4228,7 +4255,7 @@ public class TestHRegion { } } } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -4246,11 +4273,10 @@ public class TestHRegion { HRegionInfo hri = new HRegionInfo(htd.getTableName()); // Create a region and skip the initialization (like CreateTableHandler) - HRegion region = HRegion.createHRegion(hri, rootDir, CONF, htd, null, false, true); -// HRegion region = TEST_UTIL.createLocalHRegion(hri, htd); + HRegion region = 
HBaseTestingUtility.createRegionAndWAL(hri, rootDir, CONF, htd, false); Path regionDir = region.getRegionFileSystem().getRegionDir(); FileSystem fs = region.getRegionFileSystem().getFileSystem(); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); Path regionInfoFile = new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE); @@ -4261,7 +4287,7 @@ public class TestHRegion { // Try to open the region region = HRegion.openHRegion(rootDir, hri, htd, null, CONF); assertEquals(regionDir, region.getRegionFileSystem().getRegionDir()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); // Verify that the .regioninfo file is still there assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir", @@ -4275,7 +4301,7 @@ public class TestHRegion { region = HRegion.openHRegion(rootDir, hri, htd, null, CONF); // region = TEST_UTIL.openHRegion(hri, htd); assertEquals(regionDir, region.getRegionFileSystem().getRegionDir()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); // Verify that the .regioninfo file is still there assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir", @@ -4621,7 +4647,7 @@ public class TestHRegion { verify(wal, never()).sync(); } - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } @@ -4652,8 +4678,8 @@ public class TestHRegion { HRegion primaryRegion = null, secondaryRegion = null; try { - primaryRegion = HRegion.createHRegion(primaryHri, - rootDir, TEST_UTIL.getConfiguration(), htd); + primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri, + rootDir, TEST_UTIL.getConfiguration(), htd); // load some data putData(primaryRegion, 0, 1000, cq, families); @@ -4667,10 +4693,10 @@ public class TestHRegion { verifyData(secondaryRegion, 0, 1000, cq, families); } finally { if (primaryRegion != null) { - HRegion.closeHRegion(primaryRegion); + HBaseTestingUtility.closeRegionAndWAL(primaryRegion); } if (secondaryRegion != null) { - HRegion.closeHRegion(secondaryRegion); + HBaseTestingUtility.closeRegionAndWAL(secondaryRegion); } } } @@ -4702,8 +4728,8 @@ public class TestHRegion { HRegion primaryRegion = null, secondaryRegion = null; try { - primaryRegion = HRegion.createHRegion(primaryHri, - rootDir, TEST_UTIL.getConfiguration(), htd); + primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri, + rootDir, TEST_UTIL.getConfiguration(), htd); // load some data putData(primaryRegion, 0, 1000, cq, families); @@ -4722,10 +4748,10 @@ public class TestHRegion { } } finally { if (primaryRegion != null) { - HRegion.closeHRegion(primaryRegion); + HBaseTestingUtility.closeRegionAndWAL(primaryRegion); } if (secondaryRegion != null) { - HRegion.closeHRegion(secondaryRegion); + HBaseTestingUtility.closeRegionAndWAL(secondaryRegion); } } } @@ -4755,8 +4781,8 @@ public class TestHRegion { HRegion primaryRegion = null, secondaryRegion = null; try { - primaryRegion = HRegion.createHRegion(primaryHri, - rootDir, TEST_UTIL.getConfiguration(), htd); + primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri, + rootDir, TEST_UTIL.getConfiguration(), htd); // load some data putData(primaryRegion, 0, 1000, cq, families); @@ -4776,10 +4802,10 @@ public class TestHRegion { verifyData(secondaryRegion, 0, 1000, cq, families); } finally { if (primaryRegion != null) { - HRegion.closeHRegion(primaryRegion); + HBaseTestingUtility.closeRegionAndWAL(primaryRegion); } if (secondaryRegion != 
null) { - HRegion.closeHRegion(secondaryRegion); + HBaseTestingUtility.closeRegionAndWAL(secondaryRegion); } } } @@ -4930,9 +4956,9 @@ public class TestHRegion { * @param families * @throws IOException * @return A region on which you must call - * {@link HRegion#closeHRegion(HRegion)} when done. + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done. */ - public static HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf, + private static HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf, byte[]... families) throws IOException { return initHRegion(tableName.getName(), null, null, callingMethod, conf, false, families); } @@ -4944,9 +4970,9 @@ public class TestHRegion { * @param families * @throws IOException * @return A region on which you must call - * {@link HRegion#closeHRegion(HRegion)} when done. + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done. */ - public static HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf, + private static HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf, byte[]... families) throws IOException { return initHRegion(tableName, null, null, callingMethod, conf, false, families); } @@ -4959,9 +4985,9 @@ public class TestHRegion { * @param families * @throws IOException * @return A region on which you must call - * {@link HRegion#closeHRegion(HRegion)} when done. + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done. */ - public static HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf, + private static HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf, boolean isReadOnly, byte[]... families) throws IOException { return initHRegion(tableName, null, null, callingMethod, conf, isReadOnly, families); } @@ -4969,8 +4995,11 @@ public class TestHRegion { private static HRegion initHRegion(byte[] tableName, byte[] startKey, byte[] stopKey, String callingMethod, Configuration conf, boolean isReadOnly, byte[]... families) throws IOException { + Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log"); + HRegionInfo hri = new HRegionInfo(TableName.valueOf(tableName), startKey, stopKey); + final WAL wal = HBaseTestingUtility.createWal(conf, logDir, hri); return initHRegion(tableName, startKey, stopKey, callingMethod, conf, isReadOnly, - Durability.SYNC_WAL, null, families); + Durability.SYNC_WAL, wal, families); } /** @@ -4983,7 +5012,7 @@ public class TestHRegion { * @param families * @throws IOException * @return A region on which you must call - * {@link HRegion#closeHRegion(HRegion)} when done. + * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done. 
*/ private static HRegion initHRegion(byte[] tableName, byte[] startKey, byte[] stopKey, String callingMethod, Configuration conf, boolean isReadOnly, Durability durability, @@ -5060,7 +5089,7 @@ public class TestHRegion { assertFalse(hasNext); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5117,7 +5146,7 @@ public class TestHRegion { assertFalse(hasNext); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5171,7 +5200,7 @@ public class TestHRegion { assertFalse(hasNext); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5249,7 +5278,7 @@ public class TestHRegion { assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5329,7 +5358,7 @@ public class TestHRegion { assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5490,7 +5519,7 @@ public class TestHRegion { scanner.close(); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5564,7 +5593,7 @@ public class TestHRegion { assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); assertFalse(hasNext); } finally { - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } } @@ -5659,7 +5688,7 @@ public class TestHRegion { assertEquals(verify, startRow - 1); scanner.close(); } finally { - HRegion.closeHRegion(this.region); + this.region.close(); this.region = null; } } @@ -5686,7 +5715,7 @@ public class TestHRegion { region.delete(new Delete(row)); Assert.assertEquals(4L, region.getWriteRequestsCount()); - HRegion.closeHRegion(this.region); + HBaseTestingUtility.closeRegionAndWAL(this.region); this.region = null; } @@ -5706,14 +5735,14 @@ public class TestHRegion { // open the region w/o rss and wal and flush some files HRegion region = - HRegion.createHRegion(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL - .getConfiguration(), htd); + HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL + .getConfiguration(), htd); assertNotNull(region); // create a file in fam1 for the region before opening in OpenRegionHandler region.put(new Put(Bytes.toBytes("a")).add(fam1, fam1, fam1)); region.flushcache(); - region.close(); + HBaseTestingUtility.closeRegionAndWAL(region); ArgumentCaptor editCaptor = ArgumentCaptor.forClass(WALEdit.class); @@ -5757,7 +5786,7 @@ public class TestHRegion { assertEquals(0, store.getStoreFileCount()); // no store files } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -5867,7 +5896,7 @@ public class TestHRegion { } catch (Throwable e) { } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); region = null; CONF.setLong("hbase.busy.wait.duration", defaultBusyWaitDuration); } @@ -5892,9 +5921,9 @@ public class TestHRegion { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS); - HRegion region = HRegion.createHRegion(new 
HRegionInfo(htd.getTableName(), - HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY), - TEST_UTIL.getDataTestDir(), conf, htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(new HRegionInfo(htd.getTableName(), + HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY), + TEST_UTIL.getDataTestDir(), conf, htd); assertNotNull(region); try { long now = EnvironmentEdgeManager.currentTime(); @@ -5996,7 +6025,7 @@ public class TestHRegion { assertNull(r.getValue(fam1, q1)); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java index c7142fd..5fde726 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java @@ -65,11 +65,11 @@ public class TestHRegionInfo { Path basedir = htu.getDataTestDir(); // Create a region. That'll write the .regioninfo file. FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration()); - HRegion r = HRegion.createHRegion(hri, basedir, htu.getConfiguration(), - fsTableDescriptors.get(TableName.META_TABLE_NAME)); + HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, basedir, htu.getConfiguration(), + fsTableDescriptors.get(TableName.META_TABLE_NAME)); // Get modtime on the file. long modtime = getModTime(r); - HRegion.closeHRegion(r); + HBaseTestingUtility.closeRegionAndWAL(r); Thread.sleep(1001); r = HRegion.openHRegion(basedir, hri, fsTableDescriptors.get(TableName.META_TABLE_NAME), null, htu.getConfiguration()); @@ -80,6 +80,7 @@ public class TestHRegionInfo { HRegionInfo deserializedHri = HRegionFileSystem.loadRegionInfoFileContent( r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir()); assertTrue(hri.equals(deserializedHri)); + HBaseTestingUtility.closeRegionAndWAL(r); } long getModTime(final HRegion r) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index d30b5f8..81d681c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -31,12 +31,8 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; -import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.TableName; @@ -176,26 +172,6 @@ public class TestJoinedScanners { + " seconds, got " + Long.toString(rows_count/2) + " rows"); } - private static HRegion initHRegion(byte[] tableName, byte[] startKey, byte[] stopKey, - String callingMethod, Configuration conf, byte[]... 
families) - throws IOException { - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); - for(byte [] family : families) { - HColumnDescriptor hcd = new HColumnDescriptor(family); - hcd.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF); - htd.addFamily(hcd); - } - HRegionInfo info = new HRegionInfo(htd.getTableName(), startKey, stopKey, false); - Path path = new Path(DIR + callingMethod); - FileSystem fs = FileSystem.get(conf); - if (fs.exists(path)) { - if (!fs.delete(path, true)) { - throw new IOException("Failed delete of " + path); - } - } - return HRegion.createHRegion(info, path, conf, htd); - } - private static Options options = new Options(); /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java index 341b02c..49ee7e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java @@ -179,7 +179,7 @@ public class TestKeepDeletes { checkResult(r, c0, c0, T1); assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -229,7 +229,7 @@ public class TestKeepDeletes { scan.next(kvs); assertTrue(kvs.isEmpty()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -274,7 +274,7 @@ public class TestKeepDeletes { // major compaction deleted it assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -298,7 +298,7 @@ public class TestKeepDeletes { // ok! } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -388,7 +388,7 @@ public class TestKeepDeletes { assertTrue(CellUtil.isDelete(kvs.get(1))); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -431,7 +431,7 @@ public class TestKeepDeletes { region.compactStores(true); assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -494,7 +494,7 @@ public class TestKeepDeletes { region.compactStores(true); assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -572,7 +572,7 @@ public class TestKeepDeletes { region.compactStores(true); assertEquals(1, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -652,7 +652,7 @@ public class TestKeepDeletes { checkGet(region, T2, c1, c0, ts+3, T2, T1); checkGet(region, T2, c1, c1, ts+3, T2, T1); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -747,7 +747,7 @@ public class TestKeepDeletes { region.compactStores(true); assertEquals(1, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -797,7 +797,7 @@ public class TestKeepDeletes { assertEquals(4, kvs.size()); scanner.close(); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -876,7 +876,7 @@ public class TestKeepDeletes { region.compactStores(true); assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } /** @@ -921,7 +921,7 @@ public class TestKeepDeletes { // all delete marker 
gone assertEquals(0, countDeleteMarkers(region)); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } private void checkGet(HRegion region, byte[] row, byte[] fam, byte[] col, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java index d022acb..16f29dc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java @@ -105,7 +105,7 @@ public class TestMinVersions { r = region.getClosestRowBefore(T2, c0); checkResult(r, c0, T4); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -162,7 +162,7 @@ public class TestMinVersions { r = region.get(g); // this'll use ExplicitColumnTracker checkResult(r, c0, T3,T2,T1); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -221,7 +221,7 @@ public class TestMinVersions { r = region.get(g); // this'll use ExplicitColumnTracker checkResult(r, c0, T3); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -296,7 +296,7 @@ public class TestMinVersions { r = region.get(g); // this'll use ExplicitColumnTracker checkResult(r, c0, T5,T4); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -388,7 +388,7 @@ public class TestMinVersions { r = region.get(g); assertTrue(r.isEmpty()); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } @@ -463,7 +463,7 @@ public class TestMinVersions { r = region.get(g); checkResult(r, c0, T2); } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java index 51cc9d5..9f0b339 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java @@ -285,7 +285,7 @@ public class TestMultiColumnScanner { "pairs", lastDelTimeMap.size() > 0); LOG.info("Number of row/col pairs deleted at least once: " + lastDelTimeMap.size()); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } private static String getRowQualStr(Cell kv) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java index ae8f64f..e3f51ea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java @@ -65,8 +65,6 @@ import com.google.common.hash.Hashing; public class TestPerColumnFamilyFlush { private static final Log LOG = LogFactory.getLog(TestPerColumnFamilyFlush.class); - HRegion region = null; - private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Path DIR = TEST_UTIL.getDataTestDir("TestHRegion"); @@ -82,14 +80,14 @@ public class TestPerColumnFamilyFlush { public static final byte[] FAMILY3 = families[2]; - private void 
initHRegion(String callingMethod, Configuration conf) throws IOException { + private HRegion initHRegion(String callingMethod, Configuration conf) throws IOException { HTableDescriptor htd = new HTableDescriptor(TABLENAME); for (byte[] family : families) { htd.addFamily(new HColumnDescriptor(family)); } HRegionInfo info = new HRegionInfo(TABLENAME, null, null, false); Path path = new Path(DIR, callingMethod); - region = HRegion.createHRegion(info, path, conf, htd); + return HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd); } // A helper function to create puts. @@ -129,7 +127,7 @@ public class TestPerColumnFamilyFlush { conf.set(FlushPolicyFactory.HBASE_FLUSH_POLICY_KEY, FlushLargeStoresPolicy.class.getName()); conf.setLong(FlushLargeStoresPolicy.HREGION_COLUMNFAMILY_FLUSH_SIZE_LOWER_BOUND, 100 * 1024); // Intialize the HRegion - initHRegion("testSelectiveFlushWhenEnabled", conf); + HRegion region = initHRegion("testSelectiveFlushWhenEnabled", conf); // Add 1200 entries for CF1, 100 for CF2 and 50 for CF3 for (int i = 1; i <= 1200; i++) { region.put(createPut(1, i)); @@ -257,6 +255,7 @@ public class TestPerColumnFamilyFlush { // Since we won't find any CF above the threshold, and hence no specific // store to flush, we should flush all the memstores. assertEquals(0, region.getMemstoreSize().get()); + HBaseTestingUtility.closeRegionAndWAL(region); } @Test (timeout=180000) @@ -267,7 +266,7 @@ public class TestPerColumnFamilyFlush { conf.set(FlushPolicyFactory.HBASE_FLUSH_POLICY_KEY, FlushAllStoresPolicy.class.getName()); // Intialize the HRegion - initHRegion("testSelectiveFlushWhenNotEnabled", conf); + HRegion region = initHRegion("testSelectiveFlushWhenNotEnabled", conf); // Add 1200 entries for CF1, 100 for CF2 and 50 for CF3 for (int i = 1; i <= 1200; i++) { region.put(createPut(1, i)); @@ -312,6 +311,7 @@ public class TestPerColumnFamilyFlush { assertEquals(DefaultMemStore.DEEP_OVERHEAD, cf3MemstoreSize); assertEquals(0, totalMemstoreSize); assertEquals(HConstants.NO_SEQNUM, smallestSeqInRegionCurrentMemstore); + HBaseTestingUtility.closeRegionAndWAL(region); } // Find the (first) region which has the specified name. 
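Editor's sketch (not part of the patch): with TestPerColumnFamilyFlush#initHRegion now returning the region it builds through HBaseTestingUtility.createRegionAndWAL instead of stashing it in a field, each caller owns the cleanup. A minimal sketch of that calling pattern, assuming only the helpers shown in this file's hunks (initHRegion, createPut); the loop bound and the assertion are illustrative.

  @Test (timeout=180000)
  public void selectiveFlushSketch() throws IOException {
    Configuration conf = TEST_UTIL.getConfiguration();
    HRegion region = initHRegion("selectiveFlushSketch", conf);
    try {
      for (int i = 1; i <= 100; i++) {
        region.put(createPut(1, i));  // entries for CF1 only
      }
      region.flushcache();            // the configured flush policy picks the stores to flush
      assertEquals(0, region.getMemstoreSize().get());
    } finally {
      // One call tears down both the region and the WAL that createRegionAndWAL made for it.
      HBaseTestingUtility.closeRegionAndWAL(region);
    }
  }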
@@ -585,7 +585,7 @@ public class TestPerColumnFamilyFlush { table.close(); conn.close(); - region = getRegionWithName(TABLENAME).getFirst(); + HRegion region = getRegionWithName(TABLENAME).getFirst(); cf1StoreFileCount1 = region.getStore(FAMILY1).getStorefilesCount(); cf2StoreFileCount1 = region.getStore(FAMILY2).getStorefilesCount(); cf3StoreFileCount1 = region.getStore(FAMILY3).getStorefilesCount(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java index 8b5b4a3..8bcd89e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java @@ -248,7 +248,7 @@ public class TestRegionMergeTransaction { assertEquals((rowCountOfRegionA + rowCountOfRegionB), mergedRegionRowCount); } finally { - HRegion.closeHRegion(mergedRegion); + HBaseTestingUtility.closeRegionAndWAL(mergedRegion); } // Assert the write lock is no longer held on region_a and region_b assertTrue(!this.region_a.lock.writeLock().isHeldByCurrentThread()); @@ -308,7 +308,7 @@ public class TestRegionMergeTransaction { assertEquals((rowCountOfRegionA + rowCountOfRegionB), mergedRegionRowCount); } finally { - HRegion.closeHRegion(mergedRegion); + HBaseTestingUtility.closeRegionAndWAL(mergedRegion); } // Assert the write lock is no longer held on region_a and region_b assertTrue(!this.region_a.lock.writeLock().isHeldByCurrentThread()); @@ -412,9 +412,9 @@ public class TestRegionMergeTransaction { HColumnDescriptor hcd = new HColumnDescriptor(CF); htd.addFamily(hcd); HRegionInfo hri = new HRegionInfo(htd.getTableName(), startrow, endrow); - HRegion a = HRegion.createHRegion(hri, testdir, + HRegion a = HBaseTestingUtility.createRegionAndWAL(hri, testdir, TEST_UTIL.getConfiguration(), htd); - HRegion.closeHRegion(a); + HBaseTestingUtility.closeRegionAndWAL(a); return HRegion.openHRegion(testdir, hri, htd, wals.getWAL(hri.getEncodedNameAsBytes()), TEST_UTIL.getConfiguration()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java index 27bdda0..dd7ef29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java @@ -69,7 +69,7 @@ public class TestResettingCounters { throw new IOException("Failed delete of " + path); } } - HRegion region = HRegion.createHRegion(hri, path, conf, htd); + HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, path, conf, htd); try { Increment odd = new Increment(rows[0]); odd.setDurability(Durability.SKIP_WAL); @@ -100,9 +100,9 @@ public class TestResettingCounters { assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i]))); } } finally { - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index 5a95df11..c20ec1b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java 
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -37,10 +37,13 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.client.Durability; @@ -308,12 +311,11 @@ public class TestReversibleScanners { @Test public void testReversibleRegionScanner() throws IOException { - byte[] tableName = Bytes.toBytes("testtable"); byte[] FAMILYNAME2 = Bytes.toBytes("testCf2"); - Configuration conf = HBaseConfiguration.create(); - HRegion region = TEST_UTIL.createLocalHRegion(tableName, null, null, - "testReversibleRegionScanner", conf, false, Durability.SYNC_WAL, null, - FAMILYNAME, FAMILYNAME2); + HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testtable")) + .addFamily(new HColumnDescriptor(FAMILYNAME)) + .addFamily(new HColumnDescriptor(FAMILYNAME2)); + HRegion region = TEST_UTIL.createLocalHRegion(htd, null, null); loadDataToRegion(region, FAMILYNAME2); // verify row count with forward scan diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java index afe02be..9163e61 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java @@ -113,7 +113,7 @@ public class TestScanWithBloomError { createStoreFile(new int[] {1, 9}); scanColSet(new int[]{1, 4, 6, 7}, new int[]{1, 6, 7}); - HRegion.closeHRegion(region); + HBaseTestingUtility.closeRegionAndWAL(region); } private void scanColSet(int[] colSet, int[] expectedResultCols) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java index 08b8dcc..72f556e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java @@ -162,7 +162,7 @@ public class TestScanner { assertTrue(count > 10); s.close(); } finally { - HRegion.closeHRegion(this.r); + HBaseTestingUtility.closeRegionAndWAL(this.r); } } @@ -215,7 +215,7 @@ public class TestScanner { rowInclusiveStopFilter(scan, stopRow); } finally { - HRegion.closeHRegion(this.r); + HBaseTestingUtility.closeRegionAndWAL(this.r); } } @@ -242,7 +242,7 @@ public class TestScanner { return; } } finally { - HRegion.closeHRegion(this.r); + HBaseTestingUtility.closeRegionAndWAL(this.r); } } @@ -355,7 +355,7 @@ public class TestScanner { } finally { // clean up - HRegion.closeHRegion(r); + HBaseTestingUtility.closeRegionAndWAL(r); } } @@ -475,7 +475,7 @@ public class TestScanner { LOG.error("Failed", e); throw e; } finally { - HRegion.closeHRegion(this.r); + HBaseTestingUtility.closeRegionAndWAL(this.r); } } @@ -498,7 +498,7 @@ public class TestScanner { 
       LOG.error("Failed", e);
       throw e;
     } finally {
-      HRegion.closeHRegion(this.r);
+      HBaseTestingUtility.closeRegionAndWAL(this.r);
     }
   }
 
@@ -555,7 +555,7 @@ public class TestScanner {
       assertTrue(CellUtil.matchingFamily(results.get(0), fam1));
       assertTrue(CellUtil.matchingFamily(results.get(1), fam2));
     } finally {
-      HRegion.closeHRegion(this.r);
+      HBaseTestingUtility.closeRegionAndWAL(this.r);
     }
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index ec81ac1..ffc76f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -439,7 +439,7 @@ public class TestSeekOptimizations {
   @After
   public void tearDown() throws IOException {
     if (region != null) {
-      HRegion.closeHRegion(region);
+      HBaseTestingUtility.closeRegionAndWAL(region);
     }
 
     // We have to re-set the lazy seek flag back to the default so that other
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index 66375e9..9c9fa6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.doCallRealMethod;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.spy;
@@ -248,7 +247,7 @@ public class TestSplitTransaction {
         assertTrue(count > 0 && count != rowcount);
         daughtersRowCount += count;
       } finally {
-        HRegion.closeHRegion(openRegion);
+        HBaseTestingUtility.closeRegionAndWAL(openRegion);
       }
     }
     assertEquals(rowcount, daughtersRowCount);
@@ -332,7 +331,7 @@ public class TestSplitTransaction {
         assertTrue(count > 0 && count != rowcount);
         daughtersRowCount += count;
       } finally {
-        HRegion.closeHRegion(openRegion);
+        HBaseTestingUtility.closeRegionAndWAL(openRegion);
       }
     }
     assertEquals(rowcount, daughtersRowCount);
@@ -376,8 +375,9 @@ public class TestSplitTransaction {
     HColumnDescriptor hcd = new HColumnDescriptor(CF);
     htd.addFamily(hcd);
     HRegionInfo hri = new HRegionInfo(htd.getTableName(), STARTROW, ENDROW);
-    HRegion r = HRegion.createHRegion(hri, testdir, TEST_UTIL.getConfiguration(), htd);
-    HRegion.closeHRegion(r);
+    HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, testdir, TEST_UTIL.getConfiguration(),
+      htd);
+    HBaseTestingUtility.closeRegionAndWAL(r);
     return HRegion.openHRegion(testdir, hri, htd, wals.getWAL(hri.getEncodedNameAsBytes()),
       TEST_UTIL.getConfiguration());
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index b929cfe..ba05e9a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -138,7 +139,7 @@ public class TestWideScanner extends HBaseTestCase {
       }
       s.close();
     } finally {
-      HRegion.closeHRegion(this.r);
+      HBaseTestingUtility.closeRegionAndWAL(this.r);
     }
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 970b0f2..2c25e3b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -37,7 +37,6 @@ import java.util.concurrent.atomic.AtomicLong;
 import org.apache.commons.lang.mutable.MutableBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -68,10 +67,6 @@ import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.DefaultWALProvider;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -395,7 +390,7 @@ public class TestFSHLog {
    * flush. The addition of the sync over HRegion in flush should fix an issue where flush was
    * returning before all of its appends had made it out to the WAL (HBASE-11109).
    * @throws IOException
-   * @see HBASE-11109
+   * @see <a href="https://issues.apache.org/jira/browse/HBASE-11109">HBASE-11109</a>
    */
   @Test
   public void testFlushSequenceIdIsGreaterThanAllEditsInHFile() throws IOException {
@@ -405,9 +400,9 @@ public class TestFSHLog {
     final byte[] rowName = tableName.getName();
     final HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor("f"));
-    HRegion r = HRegion.createHRegion(hri, TEST_UTIL.getDefaultRootDirPath(),
-      TEST_UTIL.getConfiguration(), htd);
-    HRegion.closeHRegion(r);
+    HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDefaultRootDirPath(),
+      TEST_UTIL.getConfiguration(), htd);
+    HBaseTestingUtility.closeRegionAndWAL(r);
     final int countPerFamily = 10;
     final MutableBoolean goslow = new MutableBoolean(false);
     // subclass and doctor a method.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index c441969..bb634d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -57,7 +57,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -273,7 +272,7 @@ public class TestWALReplay {
   /**
    * Tests for hbase-2727.
    * @throws Exception
-   * @see https://issues.apache.org/jira/browse/HBASE-2727
+   * @see <a href="https://issues.apache.org/jira/browse/HBASE-2727">HBASE-2727</a>
    */
   @Test
   public void test2727() throws Exception {
@@ -286,9 +285,8 @@ public class TestWALReplay {
     deleteDir(basedir);
 
     HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region2);
+    HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region2);
     final byte [] rowName = tableName.getName();
 
     WAL wal1 = createWAL(this.conf);
@@ -348,9 +346,8 @@ public class TestWALReplay {
     final Path basedir = new Path(this.hbaseRootDir, tableName.getNameAsString());
     deleteDir(basedir);
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region2);
+    HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region2);
 
     WAL wal = createWAL(this.conf);
     HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf);
@@ -415,9 +412,8 @@ public class TestWALReplay {
     final Path basedir = new Path(this.hbaseRootDir, tableName.getNameAsString());
     deleteDir(basedir);
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region2);
+    HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region2);
 
     WAL wal = createWAL(this.conf);
     HRegion region = HRegion.openHRegion(hri, htd, wal, this.conf);
@@ -488,9 +484,8 @@ public class TestWALReplay {
     final byte[] rowName = tableName.getName();
     final int countPerFamily = 10;
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region3 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region3);
+    HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region3);
     // Write countPerFamily edits into the three families. Do a flush on one
     // of the families during the load of edits so its seqid is not same as
     // others to test we do right thing when different seqids.
@@ -598,9 +593,8 @@ public class TestWALReplay {
     final byte[] rowName = tableName.getName();
     final int countPerFamily = 10;
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region3 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region3);
+    HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region3);
     // Write countPerFamily edits into the three families. Do a flush on one
     // of the families during the load of edits so its seqid is not same as
     // others to test we do right thing when different seqids.
@@ -681,9 +675,8 @@ public class TestWALReplay {
     final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
     deleteDir(basedir);
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region3 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, htd);
-    region3.close();
-    region3.getWAL().close();
+    HRegion region3 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region3);
     // Write countPerFamily edits into the three families. Do a flush on one
     // of the families during the load of edits so its seqid is not same as
     // others to test we do right thing when different seqids.
@@ -780,9 +773,8 @@ public class TestWALReplay {
     deleteDir(basedir);
 
     final HTableDescriptor htd = createBasic3FamilyHTD(tableName);
-    HRegion region2 = HRegion.createHRegion(hri,
-        hbaseRootDir, this.conf, htd);
-    HRegion.closeHRegion(region2);
+    HRegion region2 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
+    HBaseTestingUtility.closeRegionAndWAL(region2);
     final WAL wal = createWAL(this.conf);
     final byte[] rowName = tableName.getName();
     final byte[] regionName = hri.getEncodedNameAsBytes();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
index 69e5d70..6a86580 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
@@ -31,9 +31,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -143,7 +140,8 @@ public class TestMergeTable {
       byte [] startKey, byte [] endKey, int firstRow, int nrows, Path rootdir)
   throws IOException {
     HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey);
-    HRegion region = HRegion.createHRegion(hri, rootdir, UTIL.getConfiguration(), desc);
+    HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, rootdir, UTIL.getConfiguration(),
+      desc);
     LOG.info("Created region " + region.getRegionNameAsString());
     for(int i = firstRow; i < firstRow + nrows; i++) {
       Put put = new Put(Bytes.toBytes("row_" + String.format("%1$05d", i)));
@@ -155,19 +153,19 @@ public class TestMergeTable {
         region.flushcache();
       }
     }
-    HRegion.closeHRegion(region);
+    HBaseTestingUtility.closeRegionAndWAL(region);
    return region;
   }
 
   protected void setupMeta(Path rootdir, final HRegion [] regions)
   throws IOException {
     HRegion meta =
-      HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, rootdir,
-      UTIL.getConfiguration(), UTIL.getMetaTableDescriptor());
+      HBaseTestingUtility.createRegionAndWAL(HRegionInfo.FIRST_META_REGIONINFO, rootdir,
+        UTIL.getConfiguration(), UTIL.getMetaTableDescriptor());
 
     for (HRegion r: regions) {
       HRegion.addRegionToMETA(meta, r);
     }
-    HRegion.closeHRegion(meta);
+    HBaseTestingUtility.closeRegionAndWAL(meta);
   }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index 0f6d697..b04e5de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -156,7 +156,8 @@ public class TestMergeTool extends HBaseTestCase {
      */
     for (int i = 0; i < sourceRegions.length; i++) {
       regions[i] =
-        HRegion.createHRegion(this.sourceRegions[i], testDir, this.conf, this.desc);
+        HBaseTestingUtility.createRegionAndWAL(this.sourceRegions[i], testDir, this.conf,
+          this.desc);
       /*
        * Insert data
        */
@@ -183,7 +184,7 @@ public class TestMergeTool extends HBaseTestCase {
     for (int i = 0; i < sourceRegions.length; i++) {
       HRegion r = regions[i];
       if (r != null) {
-        HRegion.closeHRegion(r);
+        HBaseTestingUtility.closeRegionAndWAL(r);
       }
     }
     wals.close();
@@ -272,7 +273,7 @@ public class TestMergeTool extends HBaseTestCase {
           assertTrue(Bytes.equals(bytes, rows[i][j]));
         }
         // Close the region and delete the log
-        HRegion.closeHRegion(regions[i]);
+        HBaseTestingUtility.closeRegionAndWAL(regions[i]);
       }
       WAL log = wals.getWAL(new byte[]{});
       // Merge Region 0 and Region 1
-- 
1.9.3 (Apple Git-50)