diff --git hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
index 8a60e6717d..1259dfb5d7 100644
--- hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
+++ hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
@@ -32,6 +32,8 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupAdmin;
 import org.apache.hadoop.hbase.backup.BackupClientFactory;
@@ -46,6 +48,7 @@ import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.RestoreRequest;
 import org.apache.hadoop.hbase.backup.util.BackupSet;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
@@ -558,6 +561,8 @@ public class BackupAdminImpl implements BackupAdmin {
           throw new IOException("Target backup directory " + targetTableBackupDir
               + " exists already.");
         }
+
+        checkBackupAccess(outputFs, targetTableBackupDirPath);
       }
       ArrayList<TableName> nonExistingTableList = null;
       try (Admin admin = conn.getAdmin();) {
@@ -603,6 +608,23 @@
 
     return backupId;
   }
 
+  @VisibleForTesting
+  public static void checkBackupAccess(FileSystem fs, Path path) throws IOException {
+    if (fs.exists(path)) {
+      // check if the path is writeable
+      fs.access(path, FsAction.WRITE);
+    } else {
+      Path prev = path;
+      while (prev != null) {
+        if (fs.exists(prev)) {
+          fs.access(prev, FsAction.WRITE);
+          break;
+        }
+        prev = prev.getParent();
+      }
+    }
+  }
+
   private List<TableName> excludeNonExistingTables(List<TableName> tableList,
       List<TableName> nonExistingTableList) {
diff --git hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSmallTests.java hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSmallTests.java
new file mode 100644
index 0000000000..b78f97554e
--- /dev/null
+++ hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSmallTests.java
@@ -0,0 +1,35 @@
+package org.apache.hadoop.hbase.backup;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class TestBackupSmallTests extends TestBackupBase {
+  private static final UserGroupInformation DIANA =
+      UserGroupInformation.createUserForTesting("diana", new String[] {});
+  private static final String PERMISSION_TEST_PATH = Path.SEPARATOR + "permissionUT";
+
+
+  @Test public void testBackupPathIsAccessible() throws Exception {
+    Path path = new Path(PERMISSION_TEST_PATH);
+    FileSystem fs = FileSystem.get(TEST_UTIL.getConnection().getConfiguration());
+    BackupAdminImpl.checkBackupAccess(fs, path);
+  }
+
+  @Test(expected = IOException.class) public void testBackupPathIsNotAccessible() throws Exception {
+    Path path = new Path(PERMISSION_TEST_PATH);
+    FileSystem rootFs = FileSystem.get(TEST_UTIL.getConnection().getConfiguration());
+    rootFs.mkdirs(path);
+    rootFs.setPermission(path, FsPermission.createImmutable((short)000));
+    FileSystem fs = DFSTestUtil.getFileSystemAs(DIANA, TEST_UTIL.getConnection().getConfiguration());
+    BackupAdminImpl.checkBackupAccess(fs, path);
+  }
+}