Index: hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java	(revision 1365832)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java	(working copy)
@@ -316,6 +316,9 @@
       List splitList = new ArrayList();
 
       for (FileStatus file: listStatus(job)) {
+        if (file.isDir()) {
+          continue;
+        }
         Path path = file.getPath();
         FileSystem fs = path.getFileSystem(job.getConfiguration());
         FSDataInputStream fileIn = fs.open(path);
@@ -644,7 +647,7 @@
     job.setNumReduceTasks(1);
 
     job.setOutputFormatClass(TextOutputFormat.class);
-    TextOutputFormat.setOutputPath(job, new Path(inputDir,"outputs"));
+    TextOutputFormat.setOutputPath(job, new Path(inputDir.getParent(), "outputs"));
 
     TableMapReduceUtil.addDependencyJars(job);
     // Add a Class from the hbase.jar so it gets registered too.
@@ -663,14 +666,14 @@
    * @throws IOException
    */
   private Path writeInputFile(final Configuration c) throws IOException {
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
+    Path jobdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
+    Path inputDir = new Path(jobdir, "inputs");
+
     FileSystem fs = FileSystem.get(c);
-    if (!fs.exists(PERF_EVAL_DIR)) {
-      fs.mkdirs(PERF_EVAL_DIR);
-    }
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
-    Path subdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
-    fs.mkdirs(subdir);
-    Path inputFile = new Path(subdir, "input.txt");
+    fs.mkdirs(inputDir);
+
+    Path inputFile = new Path(inputDir, "input.txt");
     PrintStream out = new PrintStream(fs.create(inputFile));
     // Make input random.
     Map m = new TreeMap();
@@ -695,7 +698,7 @@
     } finally {
       out.close();
     }
-    return subdir;
+    return inputDir;
   }
 
   /**