Index: src/main/java/org/apache/hadoop/hbase/master/LogCleaner.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/master/LogCleaner.java	(revision 1066547)
+++ src/main/java/org/apache/hadoop/hbase/master/LogCleaner.java	(working copy)
@@ -37,16 +37,12 @@
 import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
 
 /**
- * This Chore, everytime it runs, will clear the wal logs in the old logs folder
- * that are deletable for each log cleaner in the chain, in order to limit the
- * number of deletes it sends, will only delete maximum 20 in a single run.
+ * This Chore, everytime it runs, will clear the HLogs in the old logs folder
+ * that are deletable for each log cleaner in the chain.
  */
 public class LogCleaner extends Chore {
   static final Log LOG = LogFactory.getLog(LogCleaner.class.getName());
 
-  // Max number we can delete on every chore, this is to make sure we don't
-  // issue thousands of delete commands around the same time
-  private final int maxDeletedLogs;
   private final FileSystem fs;
   private final Path oldLogDir;
   private List logCleanersChain;
@@ -64,9 +60,6 @@
       Configuration conf, FileSystem fs, Path oldLogDir) {
     super("LogsCleaner", p, s);
 
-
-    this.maxDeletedLogs =
-      conf.getInt("hbase.master.logcleaner.maxdeletedlogs", 20);
     this.fs = fs;
     this.oldLogDir = oldLogDir;
     this.conf = conf;
@@ -127,7 +120,6 @@
     try {
       FileStatus [] files = this.fs.listStatus(this.oldLogDir);
       if (files == null) return;
-      int nbDeletedLog = 0;
       FILE: for (FileStatus file : files) {
         Path filePath = file.getPath();
         if (HLog.validateHLogFilename(filePath.getName())) {
@@ -144,16 +136,11 @@
           }
           // delete this log file if it passes all the log cleaners
           this.fs.delete(filePath, true);
-          nbDeletedLog++;
         } else {
           LOG.warn("Found a wrongly formated file: " +
               file.getPath().getName());
           this.fs.delete(filePath, true);
-          nbDeletedLog++;
         }
-        if (nbDeletedLog >= maxDeletedLogs) {
-          break;
-        }
       }
     } catch (IOException e) {
       e = RemoteExceptionHandler.checkIOException(e);
Index: src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java	(revision 1066547)
+++ src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java	(working copy)
@@ -112,15 +112,8 @@
 
     assertEquals(34, fs.listStatus(oldLogDir).length);
 
-    // This will take care of 20 old log files (default max we can delete)
     cleaner.chore();
 
-    assertEquals(14, fs.listStatus(oldLogDir).length);
-
-    // We will delete all remaining log files which are not scheduled for
-    // replication and those that are invalid
-    cleaner.chore();
-
     // We end up with the current log file, a newer one and the 3 old log
     // files which are scheduled for replication
    assertEquals(5, fs.listStatus(oldLogDir).length);
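
For reference, the net effect of the patch on the cleaning loop can be summarized with the sketch below. This is illustrative only and is not part of the patch: the delegate type and its isLogDeletable(Path) check (called LogCleanerDelegate here) are assumptions standing in for whatever plugins sit in logCleanersChain, and the HLog filename validation branch is omitted. What it shows is the post-patch behaviour: every old log that all cleaners in the chain agree is deletable gets removed in a single chore run, with no maxDeletedLogs cap.

// Illustrative sketch only; LogCleanerDelegate/isLogDeletable are assumed names,
// not copied from the patch.
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class LogCleaningSketch {

  /** Hypothetical stand-in for the plugins held in logCleanersChain. */
  interface LogCleanerDelegate {
    boolean isLogDeletable(Path filePath);
  }

  /**
   * Delete every old log that all cleaners in the chain agree is deletable.
   * Unlike the pre-patch code, there is no per-run cap, so a single run may
   * issue a delete for every eligible file under oldLogDir.
   */
  static void cleanOldLogs(FileSystem fs, Path oldLogDir,
      List<LogCleanerDelegate> cleaners) throws IOException {
    FileStatus[] files = fs.listStatus(oldLogDir);
    if (files == null) return;

    FILE: for (FileStatus file : files) {
      Path filePath = file.getPath();
      for (LogCleanerDelegate cleaner : cleaners) {
        // Any single cleaner can veto the delete and keep the log around,
        // e.g. because it is still scheduled for replication.
        if (!cleaner.isLogDeletable(filePath)) {
          continue FILE;
        }
      }
      fs.delete(filePath, true);
    }
  }
}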