diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 0a0fdf4..ce5c7ef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -175,8 +175,6 @@ import org.mortbay.jetty.nio.SelectChannelConnector;
 import org.mortbay.jetty.servlet.Context;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.protobuf.Descriptors;
@@ -2612,6 +2610,14 @@ public class HMaster extends HRegionServer implements MasterServices {
     return procInfoList;
   }
 
+  private String concat(List tableList) {
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0; i < tableList.size() - 1; i++) {
+      sb.append(tableList.get(i)).append(", ");
+    }
+    sb.append(tableList.get(tableList.size() - 1));
+    return sb.toString();
+  }
   @Override
   public Pair backupTables(final BackupType type, List tableList,
       final String targetRootDir, final int workers,
@@ -2632,6 +2638,12 @@ public class HMaster extends HRegionServer implements MasterServices {
         throw new DoNotRetryIOException("No table covered by incremental backup.");
       }
 
+      tableList.removeAll(incrTableSet);
+      if (!tableList.isEmpty()) {
+        String extraTables = concat(tableList);
+        LOG.error("Some tables (" + extraTables + ") haven't gone through full backup");
+        throw new DoNotRetryIOException("Perform full backup on " + extraTables + " first");
+      }
       LOG.info("Incremental backup for the following table set: " + incrTableSet);
       tableList = Lists.newArrayList(incrTableSet);
     }
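
For reference, below is a minimal standalone sketch of the check the last hunk adds: any requested table that is not already covered by a prior full backup (the incrTableSet) aborts the incremental backup request, and the effective table list becomes the full incremental set. Types are deliberately simplified and hypothetical — plain String stands in for HBase's TableName (the patch's raw List parameters are presumably List<TableName> with the generics lost in rendering), IllegalStateException stands in for DoNotRetryIOException, and the class and method names are illustrative only, not part of the patch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class IncrementalBackupCheckSketch {

  // Mirrors the patch's concat(): joins the names with ", ".
  // Assumes a non-empty list, as in the patch (only called after an isEmpty() check).
  private static String concat(List<String> tableList) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < tableList.size() - 1; i++) {
      sb.append(tableList.get(i)).append(", ");
    }
    sb.append(tableList.get(tableList.size() - 1));
    return sb.toString();
  }

  // Illustrates the added validation in backupTables(): tables not covered by a
  // previous full backup fail the request fast; otherwise the incremental backup
  // proceeds over the whole incremental table set.
  static List<String> validateIncrementalRequest(List<String> requested, Set<String> incrTableSet) {
    List<String> extra = new ArrayList<>(requested);
    extra.removeAll(incrTableSet);
    if (!extra.isEmpty()) {
      throw new IllegalStateException("Perform full backup on " + concat(extra) + " first");
    }
    return new ArrayList<>(incrTableSet);
  }

  public static void main(String[] args) {
    Set<String> incrTableSet = new HashSet<>(Arrays.asList("t1", "t2"));
    // Accepted: t1 already has a full backup, so the request widens to {t1, t2}.
    System.out.println(validateIncrementalRequest(Arrays.asList("t1"), incrTableSet));
    // Rejected: t3 has never gone through a full backup.
    try {
      validateIncrementalRequest(Arrays.asList("t1", "t3"), incrTableSet);
    } catch (IllegalStateException e) {
      System.out.println(e.getMessage()); // Perform full backup on t3 first
    }
  }
}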