Index: CHANGES.txt
===================================================================
--- CHANGES.txt (revision 1004070)
+++ CHANGES.txt (working copy)
@@ -969,6 +969,7 @@
               new master
    HBASE-2825 Scans respect row locks
    HBASE-3070 Add to hbaseadmin means of shutting down a regionserver
+   HBASE-2996 Fix and clean up Maven
 
 NEW FEATURES
    HBASE-1961 HBase EC2 scripts
Index: src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java (revision 1004070)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java (working copy)
@@ -45,7 +45,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -55,7 +54,6 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 
-import com.google.common.util.concurrent.NamingThreadFactory;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
Index: src/main/java/org/apache/hadoop/hbase/client/package-info.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/client/package-info.java (revision 1004070)
+++ src/main/java/org/apache/hadoop/hbase/client/package-info.java (working copy)
@@ -86,7 +86,7 @@
 // When you create a HBaseConfiguration, it reads in whatever you've set
 // into your hbase-site.xml and in hbase-default.xml, as long as these can
 // be found on the CLASSPATH
-HBaseConfiguration config = new HBaseConfiguration();
+Configuration config = HBaseConfiguration.create();
 
 // This instantiates an HTable object that connects you to
 // the "myLittleHBaseTable" table.
Index: src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
===================================================================
--- src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/SampleUploader.java (revision 1004070)
+++ src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/SampleUploader.java (working copy)
@@ -135,7 +135,7 @@
    * @throws Exception When running the job fails.
    */
   public static void main(String[] args) throws Exception {
-    HBaseConfiguration conf = new HBaseConfiguration();
+    Configuration conf = HBaseConfiguration.create();
     String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
     if(otherArgs.length != 2) {
       System.err.println("Wrong number of arguments: " + otherArgs.length);
@@ -145,4 +145,4 @@
     Job job = configureJob(conf, otherArgs);
     System.exit(job.waitForCompletion(true) ? 0 : 1);
   }
-}
\ No newline at end of file
+}
Index: src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
===================================================================
--- src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java (revision 1004070)
+++ src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java (working copy)
@@ -141,7 +141,7 @@
   }
 
   public static void main(String[] args) throws Exception {
-    HBaseConfiguration conf = new HBaseConfiguration();
+    Configuration conf = HBaseConfiguration.create();
     String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
     if(otherArgs.length < 3) {
       System.err.println("Only " + otherArgs.length + " arguments supplied, required: 3");
Index: pom.xml
===================================================================
--- pom.xml (revision 1004070)
+++ pom.xml (working copy)
@@ -2,32 +2,26 @@
4.0.0 + + + org.apache + apache + 7 + + org.apache.hbase hbase jar - ${hbase.version} + 0.89.0-SNAPSHOT HBase - HBase is the &lt;a href="http://hadoop.apache.org"&rt;Hadoop</a&rt; database. Use it when you need random, realtime read/write access to your Big Data. + HBase is the &lt;a href="http://hadoop.apache.org"&rt;Hadoop</a&rt; database. Use it when you need + random, realtime read/write access to your Big Data. This project's goal is the hosting of very large tables -- billions of rows X millions of columns -- atop clusters of commodity hardware. http://hbase.apache.org - - - Apache 2 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - A business-friendly OSS license - - - - - Apache Software Foundation - http://www.apache.org - - scm:svn:http://svn.apache.org/repos/asf/hbase/trunk scm:svn:https://svn.apache.org/repos/asf/hbase/trunk
@@ -36,12 +30,12 @@
JIRA - http://issues.apache.org/jira/ + http://issues.apache.org/jira/browse/HBASE hudson - http://hudson.zones.apache.org/hudson/view/HBase/ + http://hudson.zones.apache.org/hudson/view/HBase/job/HBase-TRUNK/
@@ -103,8 +97,8 @@
Jonathan Gray jgray@streamy.com -8 - Streamy - http://www.streamy.com + Facebook + http://www.facebook.com larsgeorge
@@ -142,11 +136,11 @@
- mirror.facebook.net - facebook mirror - http://mirror.facebook.net/maven/repository/ + temp-hadoop + Hadoop 0.20.1/2 packaging, thrift, zk + http://people.apache.org/~rawson/repo/ - true + false true
@@ -164,17 +158,6 @@
- googlecode - Google Code - http://google-maven-repository.googlecode.com/svn/repository/ - - false - - - true - - - codehaus Codehaus Public http://repository.codehaus.org/
@@ -185,31 +168,8 @@
true - - temp-hadoop - Hadoop 0.20.1/2 packaging, thrift, zk - http://people.apache.org/~rawson/repo/ - - false - - - true - - - - ibiblio.org - ibiblio mirror - http://mirrors.ibiblio.org/pub/mirrors/maven2/ - - false - - - true - - - - build - - - + + + + build + + + org.apache.rat apache-rat-plugin + 0.6
@@ -279,18 +221,18 @@
- ${project.build.directory} + ${project.build.directory} - hbase-webapps/** + hbase-webapps/** - com.agilejava.docbkx - docbkx-maven-plugin - 2.0.11 + com.agilejava.docbkx + docbkx-maven-plugin + 2.0.11
@@ -312,25 +254,11 @@
true true true - - - ${basedir}/target/site/ - - + ${basedir}/target/site/ maven-assembly-plugin - gnu
@@ -400,18 +328,18 @@
+ location="${project.build.directory}/hbase-webapps"/> + location="${basedir}/src/main/resources/hbase-webapps"/> + location="${project.build.directory}/generated-sources"/> - - - + + +
@@ -420,19 +348,19 @@
+ outputdir="${generated.sources}" +
package="org.apache.hadoop.hbase.generated.master" + webxml="${build.webapps}/master/WEB-INF/web.xml"/> + outputdir="${generated.sources}" + package="org.apache.hadoop.hbase.generated.regionserver" + webxml="${build.webapps}/regionserver/WEB-INF/web.xml"/> - - - + @@ -444,6 +372,7 @@ org.codehaus.mojo build-helper-maven-plugin + 1.5 add-jspc-source @@ -476,127 +405,65 @@ 1.6 - UTF-8 - 0.89.0-SNAPSHOT - 0.20.3-append-r964955-1240 + + 1.3.3 1.2 - 1.1.1 - 6.1.24 - 6.1.14 - 4.8.1 - 1.8.4 - 1.2.15 - 3.3.1 - - 3.1 + 1.4 + 3.1 2.5 + 1.1.1 2.1 + r06 + 0.20.3-append-r964955-1240 5.5.23 2.1 - 1.1.5.1 - 1.4.0 + 6.1.25 + 6.1.14 + 1.4 + 1.5.2 1.1.1 + 4.8.1 + 1.2.16 + 1.8.5 2.3.0 - 1.5.8 - 1.0.1 - 0.2.0 - r05 + 1.5.8 + 1.0.1 + 0.2.0 + 3.3.1 - - - - commons-cli - commons-cli - ${commons-cli.version} - - - commons-logging - commons-logging - ${commons-logging.version} - + + + - - org.apache.hadoop - hadoop-core - ${hadoop.version} - - - ${project.groupId} - hbase - ${project.version} - - - ${project.groupId} - hbase - ${project.version} - tests - test - - - org.apache.hadoop - zookeeper - ${zookeeper.version} - - - - org.mortbay.jetty - servlet-api-2.5 - ${jetty.jspapi.version} - - - - - org.apache.hadoop - hadoop-test - ${hadoop.version} - test - - - junit - junit - ${junit.version} - test - - - org.mockito - mockito-all - ${mockito-all.version} - test - - - - + + com.google.guava + guava + ${guava.version} + + commons-cli commons-cli + ${commons-cli.version} + commons-codec + commons-codec + ${commons-codec.version} + + commons-httpclient commons-httpclient ${commons-httpclient.version} @@ -609,12 +476,29 @@ commons-logging commons-logging + ${commons-logging.version} log4j log4j + ${log4j.version} + org.apache.hadoop + avro + ${avro.version} + + + org.apache.hadoop + hadoop-core + ${hadoop.version} + + + org.apache.hadoop + zookeeper + ${zookeeper.version} + + org.apache.thrift thrift ${thrift.version} @@ -626,37 +510,20 @@ - org.slf4j - slf4j-api - ${slf4j.version} + org.jruby + jruby-complete + ${jruby.version} - org.slf4j - slf4j-log4j12 - ${slf4j.version} - runtime - - - - com.google.guava - guava - ${guava.version} - - - - org.apache.hadoop - hadoop-core - - - - org.apache.hadoop - zookeeper - - - org.mortbay.jetty jetty ${jetty.version} + + + org.mortbay.jetty + servlet-api + + org.mortbay.jetty @@ -665,24 +532,31 @@ org.mortbay.jetty - servlet-api-2.5 + jsp-2.1 + ${jetty.jspapi.version} org.mortbay.jetty - jsp-2.1 + jsp-api-2.1 ${jetty.jspapi.version} org.mortbay.jetty - jsp-api-2.1 + servlet-api-2.5 ${jetty.jspapi.version} - tomcat - jasper-runtime - ${jasper.version} + org.slf4j + slf4j-api + ${slf4j.version} + org.slf4j + slf4j-log4j12 + ${slf4j.version} + runtime + + @@ -722,56 +613,39 @@ ${jersey.version} - javax.xml.bind - jaxb-api - ${jaxb-api.version} - - javax.ws.rs jsr311-api ${jsr311.version} + javax.xml.bind + jaxb-api + ${jaxb-api.version} + + + javax.xml.stream + stax-api + + + + stax stax-api - 1.0.1 + ${stax-api.version} - - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - - org.apache.hadoop - avro - 1.3.2 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.codehaus.jackson - jackson-core-asl - 1.5.2 - - - org.codehaus.jackson - jackson-mapper-asl - 1.5.2 - - junit junit + ${junit.version} + test org.mockito mockito-all + ${mockito-all.version} + test org.apache.commons @@ -779,15 +653,12 @@ ${commons-math.version} test - + org.apache.hadoop hadoop-test + ${hadoop.version} + test - - com.google.guava - guava - ${guava.version} - - - apache.releases.https - Apache Release 
Distribution Repository - https://repository.apache.org/service/local/staging/deploy/maven2 - - - apache.snapshots.https - Apache Development Snapshot Repository - https://repository.apache.org/content/repositories/snapshots - + HBase Site scp://people.apache.org/home/psmith/public_html/hbase/sandbox/hbase/
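
Two of the API shifts in the patch above are worth illustrating. First, every example now obtains its configuration through the HBaseConfiguration.create() factory instead of the deprecated new HBaseConfiguration() constructor. A minimal client-side sketch of the resulting pattern follows; the table, family, qualifier, and row names are illustrative only and are not taken from the patch.

    // Sketch only: shows the factory-method style the patch migrates to.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CreateExample {
      public static void main(String[] args) throws Exception {
        // Reads hbase-default.xml and hbase-site.xml from the classpath,
        // just as the deprecated new HBaseConfiguration() constructor did.
        Configuration conf = HBaseConfiguration.create();

        // Hypothetical table, family, and row names, for illustration only.
        HTable table = new HTable(conf, "myLittleHBaseTable");
        Put put = new Put(Bytes.toBytes("myRow"));
        put.add(Bytes.toBytes("myLittleFamily"), Bytes.toBytes("someQualifier"),
            Bytes.toBytes("some value"));
        table.put(put);
        table.close();
      }
    }

Second, HLogSplitter drops the NamingThreadFactory import in favor of Guava's ThreadFactoryBuilder, which the guava artifact added to the pom provides. A minimal sketch of naming worker threads with it is below; the pool size and name format are assumptions, not values from HLogSplitter.

    // Sketch only: Guava replacement for the removed NamingThreadFactory.
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadFactory;
    import com.google.common.util.concurrent.ThreadFactoryBuilder;

    public class NamedPoolExample {
      public static void main(String[] args) {
        // Produces threads named "split-writer-0", "split-writer-1", ...
        // (the name pattern here is illustrative).
        ThreadFactory factory = new ThreadFactoryBuilder()
            .setNameFormat("split-writer-%d").build();
        ExecutorService pool = Executors.newFixedThreadPool(3, factory);
        pool.shutdown();
      }
    }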