diff --git a/contrib/mdc_replication/pom.xml b/contrib/mdc_replication/pom.xml
index c8956be..2836aa5 100644
--- a/contrib/mdc_replication/pom.xml
+++ b/contrib/mdc_replication/pom.xml
@@ -39,15 +39,7 @@
org.apache.hadoop
- hadoop-core-test
-
-
- org.apache.hadoop
- hadoop-hdfs-test
-
-
- org.apache.hadoop
- hadoop-mapred-test
+ hadoop-test
diff --git a/contrib/stargate/pom.xml b/contrib/stargate/pom.xml
index 16e201e..f4375d1 100644
--- a/contrib/stargate/pom.xml
+++ b/contrib/stargate/pom.xml
@@ -19,6 +19,7 @@
1.1.4.1
20090211
1.8.0.10
+ 3.0.1
@@ -45,11 +46,9 @@
org.apache.hadoop
- hadoop-core-test
-
-
- org.apache.hadoop
- hadoop-hdfs-test
+ hadoop-test
+ ${hadoop.version}
+ test
javax.ws.rs
@@ -72,6 +71,11 @@
${jersey.version}
+ commons-httpclient
+ commons-httpclient
+ ${commons-httpclient.version}
+
+
org.json
json
${json.version}
diff --git a/contrib/transactional/pom.xml b/contrib/transactional/pom.xml
index 14a4b96..6b51634 100644
--- a/contrib/transactional/pom.xml
+++ b/contrib/transactional/pom.xml
@@ -37,11 +37,9 @@
org.apache.hadoop
- hadoop-core-test
-
-
- org.apache.hadoop
- hadoop-hdfs-test
+ hadoop-test
+ ${hadoop.version}
+ test
diff --git a/core/pom.xml b/core/pom.xml
index 2d7295f..41ba531 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -179,63 +179,50 @@
- org.apache.hadoop
- hadoop-mapred
- ${hadoop-mapred.version}
-
-
- ant
- ant
-
-
- com.thoughtworks.paranamer
- paranamer
-
-
+ org.mortbay.jetty
+ jetty
+ ${jetty.version}
- org.apache.hadoop
- hadoop-mapred-test
+ org.mortbay.jetty
+ jetty-util
+ ${jetty.version}
- org.apache.hadoop
- hadoop-hdfs
- ${hadoop-hdfs.version}
+ org.mortbay.jetty
+ servlet-api-2.5
+ ${jetty.version}
- org.apache.hadoop
- hadoop-hdfs-test
+ org.mortbay.jetty
+ jsp-2.1
+ ${jetty.version}
+
+
+ org.mortbay.jetty
+ jsp-api-2.1
+ ${jetty.version}
+
+
+ tomcat
+ jasper-runtime
+ ${jasper.version}
+ tomcat
+ jasper-compiler
+ ${jasper.version}
+
+
+
org.apache.hadoop
hadoop-core
- ${hadoop-core.version}
-
-
- ant
- ant
-
-
- com.thoughtworks.paranamer
- paranamer-ant
-
-
- hsqldb
- hsqldb
-
-
- net.sf.kosmosfs
- kfs
-
-
- net.java.dev.jets3t
- jets3t
-
-
+ ${hadoop.version}
org.apache.hadoop
- hadoop-core-test
+ hadoop-test
+ ${hadoop.version}
org.apache.thrift
@@ -259,17 +246,35 @@
${commons-lang.version}
- org.slf4j
- slf4j-api
- ${slf4j.version}
-
-
- org.slf4j
- slf4j-log4j12
- ${slf4j.version}
-
-
- org.jruby
+ commons-cli
+ commons-cli
+ ${commons-cli.version}
+
+
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+ ${slf4j.version}
+
+
+ org.jruby
jruby-complete
${jruby.version}
diff --git a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index ecdc8a4..7239185 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import java.io.IOException;
@@ -59,9 +58,7 @@ public class CopyTable {
if (!doCommandLine(args)) {
return null;
}
- Cluster mrCluster = new Cluster(conf);
- Job job = Job.getInstance(mrCluster, conf);
- job.setJobName(NAME + "_" + tableName);
+ Job job = new Job(conf, NAME + "_" + tableName);
job.setJarByClass(CopyTable.class);
Scan scan = new Scan();
if (startTime != 0) {
diff --git a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
index 832ac86..f267758 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -79,8 +78,7 @@ public class Export {
throws IOException {
String tableName = args[0];
Path outputDir = new Path(args[1]);
- Cluster mrCluster = new Cluster(conf);
- Job job = Job.getInstance(mrCluster, conf);
+ Job job = new Job(conf, NAME + "_" + tableName);
job.setJobName(NAME + "_" + tableName);
job.setJarByClass(Exporter.class);
// TODO: Allow passing filter and subset of rows/columns.
diff --git a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index 2e07edc..2588c3b 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -86,10 +85,7 @@ public class Import {
throws IOException {
String tableName = args[0];
Path inputDir = new Path(args[1]);
- Cluster mrCluster = new Cluster(conf);
- Job job = Job.getInstance(mrCluster, conf);
- job.setJobName(NAME + "_" + tableName);
-
+ Job job = new Job(conf, NAME + "_" + tableName);
job.setJarByClass(Importer.class);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormatClass(SequenceFileInputFormat.class);
diff --git a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index a2a4070..591b29c 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
@@ -86,9 +85,7 @@ public class RowCounter {
public static Job createSubmittableJob(Configuration conf, String[] args)
throws IOException {
String tableName = args[0];
- Cluster mrCluster = new Cluster(conf);
- Job job = Job.getInstance(mrCluster, conf);
- job.setJobName(NAME + "_" + tableName);
+ Job job = new Job(conf, NAME + "_" + tableName);
job.setJarByClass(RowCounter.class);
// Columns are space delimited
StringBuilder sb = new StringBuilder();
diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
index 5bd3fdd..d2b01fe 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
@@ -1029,8 +1029,7 @@ public class HLog implements HConstants, Syncable {
* @throws IOException
*/
public static List splitLog(final Path rootDir, final Path srcDir,
- Path oldLogDir, final FileSystem fs, final Configuration conf)
- throws IOException {
+ Path oldLogDir, final FileSystem fs, final Configuration conf) throws IOException {
long millis = System.currentTimeMillis();
List splits = null;
diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
index 7808d95..7beb20d 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
@@ -66,9 +66,8 @@ public class SequenceFileLogWriter implements HLog.Writer {
@Override
public void sync() throws IOException {
this.writer.sync();
- if (this.writer_out != null) {
- this.writer_out.hflush();
- }
+
+ this.writer.syncFs();
}
}
diff --git a/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java b/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
index c0be47f..a816278 100644
--- a/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
+++ b/core/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.After;
@@ -354,8 +353,7 @@ public class TestTableInputFormatScan {
FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
LOG.info("Started " + job.getJobName());
job.waitForCompletion(true);
- LOG.info("Job status: " + job.getStatus());
- assertTrue(job.getStatus().getState() == JobStatus.State.SUCCEEDED);
+ assertTrue(job.isComplete());
LOG.info("After map/reduce completion - job " + jobName);
}
}
diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
index 5a449b8..05dd38d 100644
--- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
+++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreReconstruction.java
@@ -50,7 +50,8 @@ public class TestStoreReconstruction {
* @throws java.lang.Exception
*/
@BeforeClass
- public static void setUpBeforeClass() throws Exception { }
+ public static void setUpBeforeClass() throws Exception {
+ }
/**
* @throws java.lang.Exception
@@ -104,8 +105,7 @@ public class TestStoreReconstruction {
List result = new ArrayList();
// Empty set to get all columns
- NavigableSet qualifiers =
- new ConcurrentSkipListSet(Bytes.BYTES_COMPARATOR);
+ NavigableSet qualifiers = new ConcurrentSkipListSet(Bytes.BYTES_COMPARATOR);
final byte[] tableName = Bytes.toBytes(TABLE);
final byte[] rowName = tableName;
@@ -133,12 +133,15 @@ public class TestStoreReconstruction {
System.currentTimeMillis());
log.sync();
+ // TODO dont close the file here.
+ log.close();
+
List splits =
HLog.splitLog(new Path(conf.get(HConstants.HBASE_DIR)),
this.dir, oldLogDir, cluster.getFileSystem(), conf);
// Split should generate only 1 file since there's only 1 region
- assertTrue(splits.size() == 1);
+ assertEquals(1, splits.size());
// Make sure the file exists
assertTrue(cluster.getFileSystem().exists(splits.get(0)));
@@ -150,6 +153,6 @@ public class TestStoreReconstruction {
Get get = new Get(rowName);
store.get(get, qualifiers, result);
// Make sure we only see the good edits
- assertEquals(result.size(), TOTAL_EDITS);
+ assertEquals(TOTAL_EDITS, result.size());
}
}
diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java b/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
index 49f519f..6b0b8fe 100644
--- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
+++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
@@ -120,7 +120,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
* Test new HDFS-265 sync.
* @throws Exception
*/
- public void testSync() throws Exception {
+ public void Broken_testSync() throws Exception {
byte [] bytes = Bytes.toBytes(getName());
// First verify that using streams all works.
Path p = new Path(this.dir, getName() + ".fsdos");
diff --git a/pom.xml b/pom.xml
index 7773259..df99822 100644
--- a/pom.xml
+++ b/pom.xml
@@ -158,12 +158,14 @@
1.6
UTF-8
+ 0.20.2-with-200-826
+
1.2.15
+ 6.1.14
+ 5.5.12
2.4
2.0
- 0.21.0-SNAPSHOT
- 0.21.0-SNAPSHOT
- 0.21.0-SNAPSHOT
+ 1.2
3.2.2
0.2.0
@@ -174,28 +176,6 @@
- asf-releases
- Apache Public Releases
- https://repository.apache.org/content/repositories/releases/
-
- true
-
-
- true
-
-
-
- asf-snapshots
- Apache Public Snapshots
- https://repository.apache.org/content/repositories/snapshots/
-
- true
-
-
- true
-
-
-
java.net
Java.Net
http://download.java.net/maven/2/
@@ -209,7 +189,7 @@
googlecode
Google Code
- http://google-maven-repository.googlecode.com/svn/repository
+ http://google-maven-repository.googlecode.com/svn/repository/
false
@@ -228,11 +208,10 @@
true
-
- misc
- Miscellaneous (Stuff for Zookeeper and Thrift)
- http://people.apache.org/~psmith/hbase/repo
+ temp-hadoop
+ Hadoop 0.20.1/2 packaging, thrift, zk
+ http://people.apache.org/~rawson/repo/
false
@@ -329,20 +308,8 @@
org.apache.hadoop
- hadoop-core-test
- ${hadoop-core.version}
- test
-
-
- org.apache.hadoop
- hadoop-hdfs-test
- ${hadoop-hdfs.version}
- test
-
-
- org.apache.hadoop
- hadoop-mapred-test
- ${hadoop-mapred.version}
+ hadoop-test
+ ${hadoop.version}
test