diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
index c23974c..077d217 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
@@ -196,7 +196,7 @@ public class HFileArchiver {
     if (!resolveAndArchive(fs, storeArchiveDir, storeFiles)) {
       throw new IOException("Failed to archive/delete all the files for region:"
           + Bytes.toString(parent.getRegionName()) + ", family:" + Bytes.toString(family)
-          + " into " + storeArchiveDir + "Something is probably arwy on the filesystem.");
+          + " into " + storeArchiveDir + "; something awry on the filesystem.");
     }
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
index 1cb8b09..562bc4a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
@@ -68,6 +68,12 @@ class Compactor extends Configured {
     // Also calculate earliest put timestamp if major compaction
     int maxKeyCount = 0;
     long earliestPutTs = HConstants.LATEST_TIMESTAMP;
+    // REMOVE
+    for (StoreFile file: filesToCompact) {
+      if (!store.getFileSystem().exists(file.getPath())) {
+        LOG.warn("FILE DOES NOT EXIST: " + file);
+      }
+    }
     for (StoreFile file: filesToCompact) {
       StoreFile.Reader r = file.getReader();
       if (r == null) {
@@ -106,8 +112,7 @@ class Compactor extends Configured {
     this.progress = new CompactionProgress(maxKeyCount);
 
     // For each file, obtain a scanner:
-    List<StoreFileScanner> scanners = StoreFileScanner
-      .getScannersForStoreFiles(filesToCompact, false, false, true);
+    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact, false, false, true);
 
     // Get some configs
     int compactionKVMax = getConf().getInt(HConstants.COMPACTION_KV_MAX, 10);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 412abf6..09e846e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1204,7 +1204,7 @@ public class HRegion implements HeapSize { // , Writable{
    * Do preparation for pending compaction.
   * @throws IOException
   */
-  void doRegionCompactionPrep() throws IOException {
+  protected void doRegionCompactionPrep() throws IOException {
   }
 
   /*
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
index dd716d3..98b0ffb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
@@ -90,8 +90,7 @@ public class StoreFileScanner implements KeyValueScanner {
   public static List<StoreFileScanner> getScannersForStoreFiles(
       Collection<StoreFile> files, boolean cacheBlocks, boolean usePread,
       boolean isCompaction) throws IOException {
-    return getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction,
-        null);
+    return getScannersForStoreFiles(files, cacheBlocks, usePread, isCompaction, null);
   }
 
   /**
@@ -106,8 +105,7 @@ public class StoreFileScanner implements KeyValueScanner {
         files.size());
     for (StoreFile file : files) {
       StoreFile.Reader r = file.createReader();
-      StoreFileScanner scanner = r.getStoreFileScanner(cacheBlocks, usePread,
-          isCompaction);
+      StoreFileScanner scanner = r.getStoreFileScanner(cacheBlocks, usePread, isCompaction);
       scanner.setScanQueryMatcher(matcher);
       scanners.add(scanner);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index a491b44..8bea305 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1206,6 +1206,16 @@ public class HBaseTestingUtility {
     return digest.toString();
   }
 
+  public void loadNumericRows(final HTable t, final byte[] f,
+      int startRow, int endRow) throws IOException {
+    for (int i = startRow; i < endRow; i++) {
+      byte[] data = Bytes.toBytes(String.valueOf(i));
+      Put put = new Put(data);
+      put.add(f, null, data);
+      t.put(put);
+    }
+  }
+
   /**
    * Creates many regions names "aaa" to "zzz".
   *
@@ -1626,7 +1636,7 @@ public class HBaseTestingUtility {
     //ensure that we have connection to the server before closing down, otherwise
     //the close session event will be eaten out before we start CONNECTING state
     long start = System.currentTimeMillis();
-    while (newZK.getState() != States.CONNECTED 
+    while (newZK.getState() != States.CONNECTED
       && System.currentTimeMillis() - start < 1000) {
       Thread.sleep(1);
     }
@@ -1980,6 +1990,7 @@ public class HBaseTestingUtility {
       LOG.info("Found=" + rows);
       Threads.sleep(200);
     }
+    meta.close();
   }
 
   /**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 8f7f3aa..737c9ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -298,7 +298,20 @@ public class MiniHBaseCluster extends HBaseCluster {
     }
     return t;
   }
-
+
+  public List<HRegion> findRegionsForTable(byte[] tableName) {
+    ArrayList<HRegion> ret = new ArrayList<HRegion>();
+    for (JVMClusterUtil.RegionServerThread rst : getRegionServerThreads()) {
+      HRegionServer hrs = rst.getRegionServer();
+
+      for (HRegion region : hrs.getOnlineRegions(tableName)) {
+        if (Bytes.equals(region.getTableDesc().getName(), tableName)) {
+          ret.add(region);
+        }
+      }
+    }
+    return ret;
+  }
   /**
    * Cause a region server to exit doing basic clean up only on its way out.
    * @param serverNumber Used as index into a list.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
index 3da3e8a..d9f9af6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
@@ -99,14 +99,7 @@ public class TestFullLogReconstruction {
 
     // Load up the table with simple rows and count them
     int initialCount = TEST_UTIL.loadTable(table, FAMILY);
-    Scan scan = new Scan();
-    ResultScanner results = table.getScanner(scan);
-    int count = 0;
-    for (Result res : results) {
-      count++;
-    }
-    results.close();
-
+    int count = TEST_UTIL.countRows(table);
     assertEquals(initialCount, count);
 
     for(int i = 0; i < 4; i++) {
@@ -114,14 +107,8 @@ public class TestFullLogReconstruction {
     }
 
     TEST_UTIL.expireRegionServerSession(0);
-    scan = new Scan();
-    results = table.getScanner(scan);
-    int newCount = 0;
-    for (Result res : results) {
-      newCount++;
-    }
+    int newCount = TEST_UTIL.countRows(table);
     assertEquals(count, newCount);
-    results.close();
     table.close();
   }
 
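
For reference, a minimal sketch (not part of the patch) of how the test helpers introduced above, HBaseTestingUtility.loadNumericRows(), the existing countRows(), and MiniHBaseCluster.findRegionsForTable(), might be exercised together. The table name, family, and mini-cluster lifecycle below are assumptions made purely for illustration.

// Illustrative sketch only; NOT part of the patch above. TABLE_NAME, FAMILY,
// and the mini-cluster setup/teardown are assumptions for this example.
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

public class TestHelperSketch {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final byte[] TABLE_NAME = Bytes.toBytes("helperSketch");
  private static final byte[] FAMILY = Bytes.toBytes("family");

  @Test
  public void testNewTestHelpers() throws Exception {
    TEST_UTIL.startMiniCluster();
    try {
      HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILY);
      // loadNumericRows() writes rows keyed "0".."99"; countRows() verifies them.
      TEST_UTIL.loadNumericRows(table, FAMILY, 0, 100);
      assertEquals(100, TEST_UTIL.countRows(table));
      // findRegionsForTable() gathers the online HRegions backing the table
      // from every region server in the mini cluster.
      MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
      for (HRegion region : cluster.findRegionsForTable(TABLE_NAME)) {
        assertTrue(Bytes.equals(TABLE_NAME, region.getTableDesc().getName()));
      }
      table.close();
    } finally {
      TEST_UTIL.shutdownMiniCluster();
    }
  }
}

The check on getTableDesc().getName() mirrors the defensive comparison inside findRegionsForTable() itself and uses the same pre-TableName API as the rest of this patch.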