Index: src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (revision 1327371)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (working copy)
@@ -766,6 +766,15 @@
     // and then create the file
     Path tmpPath = new Path(getTmpDir(), REGIONINFO_FILE);
+
+    // If a datanode crashes, or if the RS goes down just before close is called on the
+    // regioninfo file created in the .tmp directory, the next attempt to create the file
+    // will fail with AlreadyBeingCreatedException.
+    // Hence delete the file, if it exists, before creating it.
+    if (FSUtils.isExists(fs, tmpPath)) {
+      FSUtils.delete(fs, tmpPath, true);
+    }
+
     FSDataOutputStream out = FSUtils.create(fs, tmpPath, perms);
     try {
Index: src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (revision 1327371)
+++ src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (working copy)
@@ -1012,4 +1012,30 @@
     if (status == null || status.length < 1) return null;
     return status;
   }
+
+  /**
+   * Calls fs.delete() and returns the value returned by fs.delete().
+   *
+   * @param fs the FileSystem to use
+   * @param path the Path to delete
+   * @param recursive whether to delete the path recursively
+   * @return the value returned by fs.delete()
+   * @throws IOException if an I/O error occurs
+   */
+  public static boolean delete(final FileSystem fs, final Path path, final boolean recursive)
+      throws IOException {
+    return fs.delete(path, recursive);
+  }
+
+  /**
+   * Calls fs.exists(). Checks if the specified path exists.
+   *
+   * @param fs the FileSystem to use
+   * @param path the Path to check
+   * @return the value returned by fs.exists()
+   * @throws IOException if an I/O error occurs
+   */
+  public static boolean isExists(final FileSystem fs, final Path path) throws IOException {
+    return fs.exists(path);
+  }
 }
Index: src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java (revision 1327371)
+++ src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java (working copy)
@@ -185,6 +185,35 @@
       fs.delete(p, true);
     }
   }
+
+  @Test
+  public void testDeleteAndExists() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
+    FileSystem fs = FileSystem.get(conf);
+    FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
+    // check that the correct file is created
+    String file = UUID.randomUUID().toString();
+    Path p = new Path("temptarget" + File.separator + file);
+    Path p1 = new Path("temppath" + File.separator + file);
+    try {
+      FSDataOutputStream out = FSUtils.create(fs, p, perms);
+      out.close();
+      assertTrue("The created file should be present", FSUtils.isExists(fs, p));
+      // delete the file with recursive set to false; only the file will be deleted
+      FSUtils.delete(fs, p, false);
+      // create another file
+      FSDataOutputStream out1 = FSUtils.create(fs, p1, perms);
+      out1.close();
+      // delete the file with recursive set to true; still only the file will be deleted
+      FSUtils.delete(fs, p1, true);
+      assertFalse("The deleted file should not be present", FSUtils.isExists(fs, p1));
+      // and then cleanup
+    } finally {
+      FSUtils.delete(fs, p, true);
+      FSUtils.delete(fs, p1, true);
+    }
+  }
 
   @org.junit.Rule public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =