Index: src/main/java/org/apache/hadoop/hbase/util/FSUtils.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (revision 1335303) +++ src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (working copy) @@ -23,6 +23,7 @@ import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; @@ -47,8 +48,15 @@ import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.wal.HLog; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.SequenceFile.CompressionType; +import org.apache.hadoop.io.SequenceFile.Metadata; +import org.apache.hadoop.io.compress.CompressionCodec; +import org.apache.hadoop.io.compress.DefaultCodec; +import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; @@ -154,6 +162,52 @@ } /** + * Create the specified file on the filesystem. By default, this will: + *
<ol>
+ * <li>apply the umask in the configuration (if it is enabled)</li>
+ * <li>use the fs configured buffer size (or {@value DEFAULT_BUFFER_SIZE} if
+ * not set)</li>
+ * <li>use the default replication</li>
+ * <li>use the default block size</li>
+ * <li>not track progress</li>
+ * </ol>
+ *
+ * @param fs {@link FileSystem} on which to write the file
+ * @param path {@link Path} to the file to write
+ * @param perm permissions to apply to the created file
+ * @param overwrite Whether or not the created file should be overwritten.
+ * @param force whether to request the force/sync-capable variant of create
+ * (HDFS-744), if it is available
+ * @return output stream to the created file
+ * @throws IOException if the file cannot be created
+ */
+ public static FSDataOutputStream create(FileSystem fs, Path path,
+ FsPermission perm, boolean overwrite, boolean force) throws IOException {
+ LOG.debug("Creating file: " + path + " with permission: " + perm);
+
+ try {
+ // use reflection
+ return (FSDataOutputStream) FileSystem.class.getMethod(
+ "create",
+ new Class[] { Path.class, FsPermission.class, Boolean.TYPE,
+ Boolean.TYPE, Integer.TYPE, Short.TYPE, Long.TYPE,
+ Progressable.class}).invoke(
+ fs,
+ new Object[] { path, perm, overwrite, force,
+ fs.getConf().getInt("io.file.buffer.size", 4096),
+ fs.getDefaultReplication(), fs.getDefaultBlockSize(), null });
+ } catch (InvocationTargetException ite) {
+ // function was properly called, but threw its own exception
+ throw new IOException(ite.getCause());
+ } catch (Exception e) {
+ // ignore all other exceptions. related to reflection failure
+ }
+ LOG.debug("new create -- HDFS-744 -- not available");
+
+ return fs.create(path, perm, overwrite,
+ fs.getConf().getInt("io.file.buffer.size", 4096),
+ fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
+ }
+
+ /**
 * Get the file permissions specified in the configuration, if they are
 * enabled. 
* Index: src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java (revision 1335303) +++ src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java (working copy) @@ -155,7 +155,7 @@ .getMethod("createWriter", new Class[] {FileSystem.class, Configuration.class, Path.class, Class.class, Class.class, Integer.TYPE, Short.TYPE, Long.TYPE, Boolean.TYPE, - CompressionType.class, CompressionCodec.class, Metadata.class}) + CompressionType.class, CompressionCodec.class, Metadata.class, Boolean.TYPE}) .invoke(null, new Object[] {fs, conf, path, HLog.getKeyClass(conf), WALEdit.class, Integer.valueOf(fs.getConf().getInt("io.file.buffer.size", 4096)), @@ -164,9 +164,10 @@ fs.getDefaultReplication())), Long.valueOf(conf.getLong("hbase.regionserver.hlog.blocksize", fs.getDefaultBlockSize())), - Boolean.valueOf(false) /*createParent*/, + Boolean.FALSE /*createParent*/, SequenceFile.CompressionType.NONE, new DefaultCodec(), - createMetadata(conf, compress) + createMetadata(conf, compress), + Boolean.TRUE /* force */ }); } catch (InvocationTargetException ite) { // function was properly called, but threw it's own exception @@ -175,6 +176,36 @@ // ignore all other exceptions. 
related to reflection failure
 }
+ if (this.writer == null) {
+ LOG.debug("new createWriter -- HDFS-744 -- not available");
+ try {
+ // reflection for a version of SequenceFile.createWriter that doesn't
+ // automatically create the parent directory (see HBASE-2312)
+ this.writer = (SequenceFile.Writer) SequenceFile.class
+ .getMethod("createWriter", new Class[] {FileSystem.class,
+ Configuration.class, Path.class, Class.class, Class.class,
+ Integer.TYPE, Short.TYPE, Long.TYPE, Boolean.TYPE,
+ CompressionType.class, CompressionCodec.class, Metadata.class})
+ .invoke(null, new Object[] {fs, conf, path, HLog.getKeyClass(conf),
+ WALEdit.class,
+ Integer.valueOf(fs.getConf().getInt("io.file.buffer.size", 4096)),
+ Short.valueOf((short)
+ conf.getInt("hbase.regionserver.hlog.replication",
+ fs.getDefaultReplication())),
+ Long.valueOf(conf.getLong("hbase.regionserver.hlog.blocksize",
+ fs.getDefaultBlockSize())),
+ Boolean.FALSE /*createParent*/,
+ SequenceFile.CompressionType.NONE, new DefaultCodec(),
+ createMetadata(conf, compress)
+ });
+ } catch (InvocationTargetException ite) {
+ // function was properly called, but threw its own exception
+ throw new IOException(ite.getCause());
+ } catch (Exception e) {
+ // ignore all other exceptions. 
related to reflection failure + } + } + // if reflection failed, use the old createWriter if (this.writer == null) { LOG.debug("new createWriter -- HADOOP-6840 -- not available"); Index: src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (revision 1335303) +++ src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (working copy) @@ -268,6 +268,6 @@ FileSystem fs, Path path) throws IOException { FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY); - return FSUtils.create(fs, path, perms); + return FSUtils.create(fs, path, perms, true, true); } }