Index: src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (working copy) @@ -36,6 +36,7 @@ */ public class TableInputFormat extends TableInputFormatBase implements JobConfigurable { + @SuppressWarnings("hiding") private final Log LOG = LogFactory.getLog(TableInputFormat.class); /** @@ -62,7 +63,6 @@ } } - @SuppressWarnings("deprecation") public void validateInput(JobConf job) throws IOException { // expecting exactly one path Path [] tableNames = FileInputFormat.getInputPaths(job); Index: src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (working copy) @@ -63,7 +63,7 @@ m_table = table; } - public void close(@SuppressWarnings("unused") Reporter reporter) + public void close(Reporter reporter) throws IOException { m_table.flushCommits(); } @@ -68,7 +68,7 @@ m_table.flushCommits(); } - public void write(@SuppressWarnings("unused") ImmutableBytesWritable key, + public void write(ImmutableBytesWritable key, BatchUpdate value) throws IOException { m_table.commit(new BatchUpdate(value)); } @@ -76,11 +76,8 @@ @Override @SuppressWarnings("unchecked") - public RecordWriter getRecordWriter( - @SuppressWarnings("unused") FileSystem ignored, - JobConf job, - @SuppressWarnings("unused") String name, - @SuppressWarnings("unused") Progressable progress) throws IOException { + public RecordWriter getRecordWriter(FileSystem ignored, + JobConf job, String name, Progressable progress) throws IOException { // expecting exactly one path @@ -97,7 +94,6 @@ } @Override - @SuppressWarnings("unused") public void 
checkOutputSpecs(FileSystem ignored, JobConf job) throws FileAlreadyExistsException, InvalidJobConfException, IOException { Index: src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (working copy) @@ -87,9 +87,9 @@ * Pass the new key and value to reduce. * If any of the grouping columns are not found in the value, the record is skipped. */ - public void map(@SuppressWarnings("unused") ImmutableBytesWritable key, - RowResult value, OutputCollector<ImmutableBytesWritable,RowResult> output, - @SuppressWarnings("unused") Reporter reporter) throws IOException { + public void map(ImmutableBytesWritable key, RowResult value, + OutputCollector<ImmutableBytesWritable,RowResult> output, + Reporter reporter) throws IOException { byte[][] keyVals = extractKeyValues(value); if(keyVals != null) { Index: src/java/org/apache/hadoop/hbase/mapred/RowCounter.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/RowCounter.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/RowCounter.java (working copy) @@ -59,7 +59,7 @@ public void map(ImmutableBytesWritable row, RowResult value, OutputCollector<ImmutableBytesWritable, RowResult> output, - @SuppressWarnings("unused") Reporter reporter) + Reporter reporter) throws IOException { boolean content = false; for (Map.Entry<byte [], Cell> e: value.entrySet()) { @@ -82,7 +82,7 @@ * @return the JobConf * @throws IOException */ - @SuppressWarnings({ "unused", "deprecation" }) + @SuppressWarnings("unused") public JobConf createSubmittableJob(String[] args) throws IOException { JobConf c = new JobConf(getConf(), RowCounter.class); c.setJobName(NAME); Index: src/java/org/apache/hadoop/hbase/mapred/TableSplit.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/TableSplit.java (revision 736069) +++
src/java/org/apache/hadoop/hbase/mapred/TableSplit.java (working copy) @@ -30,7 +30,7 @@ /** * A table split corresponds to a key range [low, high) */ -public class TableSplit implements InputSplit, Comparable { +public class TableSplit implements InputSplit, Comparable<TableSplit> { private byte [] m_tableName; private byte [] m_startRow; private byte [] m_endRow; @@ -106,8 +106,7 @@ Bytes.toString(m_startRow) + "," + Bytes.toString(m_endRow); } - public int compareTo(Object arg) { - TableSplit other = (TableSplit)arg; - return Bytes.compareTo(getStartRow(), other.getStartRow()); + public int compareTo(TableSplit o) { + return Bytes.compareTo(getStartRow(), o.getStartRow()); } } Index: src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java (working copy) @@ -61,7 +61,7 @@ } public int getPartition(ImmutableBytesWritable key, - @SuppressWarnings("unused") V2 value, int numPartitions) { + V2 value, int numPartitions) { byte[] region = null; // Only one region return 0 if (this.startKeys.length == 1){ Index: src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java (working copy) @@ -21,8 +21,6 @@ import java.io.DataInput; import java.io.DataOutput; -import java.io.IOException; //TODO: remove - import org.apache.hadoop.io.Writable; import org.apache.lucene.document.Document; @@ -47,11 +45,11 @@ return doc; } - public void readFields(@SuppressWarnings("unused") DataInput in) { + public void readFields(DataInput in) { // intentionally left blank } - public void write(@SuppressWarnings("unused") DataOutput out) { + public
void write(DataOutput out) { // intentionally left blank } } Index: src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java (working copy) @@ -66,7 +66,7 @@ public void reduce(ImmutableBytesWritable key, Iterator<RowResult> values, OutputCollector<ImmutableBytesWritable, LuceneDocumentWrapper> output, - @SuppressWarnings("unused") Reporter reporter) + Reporter reporter) throws IOException { if (!values.hasNext()) { return; Index: src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (working copy) @@ -62,7 +62,7 @@ */ public void map(ImmutableBytesWritable key, RowResult value, OutputCollector<ImmutableBytesWritable, RowResult> output, - @SuppressWarnings("unused") Reporter reporter) throws IOException { + Reporter reporter) throws IOException { // convert output.collect(key, value); Index: src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (working copy) @@ -45,7 +45,7 @@ */ public void reduce(ImmutableBytesWritable key, Iterator<BatchUpdate> values, OutputCollector<ImmutableBytesWritable, BatchUpdate> output, - @SuppressWarnings("unused") Reporter reporter) + Reporter reporter) throws IOException { while(values.hasNext()) { Index: src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java (working copy) @@
-102,8 +102,8 @@ boolean closed; private long docCount = 0; - public void write(@SuppressWarnings("unused") ImmutableBytesWritable key, - LuceneDocumentWrapper value) + public void write(ImmutableBytesWritable key, + LuceneDocumentWrapper value) throws IOException { // unwrap and index doc Document doc = value.get(); Index: src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java (working copy) @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapred; import java.io.ByteArrayInputStream; -import java.io.IOException; import java.io.OutputStream; import java.io.StringWriter; import java.util.concurrent.ConcurrentHashMap; @@ -75,6 +74,9 @@ static final String HBASE_INDEX_OPTIMIZE = "hbase.index.optimize"; public static class ColumnConf extends Properties { + + private static final long serialVersionUID = 7419012290580607821L; + boolean getBoolean(String name, boolean defaultValue) { String valueString = getProperty(name); if ("true".equals(valueString)) @@ -330,7 +332,7 @@ } } - public void write(OutputStream out) throws IOException { + public void write(OutputStream out) { try { Document doc = writeDocument(); DOMSource source = new DOMSource(doc); @@ -402,6 +404,7 @@ } } + @Override public String toString() { StringWriter writer = new StringWriter(); try { Index: src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java (working copy) @@ -34,9 +34,7 @@ import org.apache.hadoop.hbase.filter.StopRowFilter; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.RowResult; 
-import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.util.Writables; -import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; @@ -74,7 +72,7 @@ */ public abstract class TableInputFormatBase implements InputFormat<ImmutableBytesWritable, RowResult> { - private final Log LOG = LogFactory.getLog(TableInputFormatBase.class); + final Log LOG = LogFactory.getLog(TableInputFormatBase.class); private byte [][] inputColumns; private HTable table; private TableRecordReader tableRecordReader; @@ -203,7 +201,6 @@ * @return true if there was more data * @throws IOException */ - @SuppressWarnings("unchecked") public boolean next(ImmutableBytesWritable key, RowResult value) throws IOException { RowResult result; @@ -215,13 +212,14 @@ this.scanner.next(); // skip presumed already mapped row result = this.scanner.next(); } - boolean hasMore = result != null && result.size() > 0; - if (hasMore) { + + if (result != null && result.size() > 0) { key.set(result.getRow()); lastRow = key.get(); Writables.copyWritable(result, value); + return true; } - return hasMore; + return false; } } @@ -232,10 +230,8 @@ * @see org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit, * JobConf, Reporter) */ - public RecordReader<ImmutableBytesWritable, RowResult> getRecordReader(InputSplit split, - @SuppressWarnings("unused") - JobConf job, @SuppressWarnings("unused") - Reporter reporter) + public RecordReader<ImmutableBytesWritable, RowResult> getRecordReader( + InputSplit split, JobConf job, Reporter reporter) throws IOException { TableSplit tSplit = (TableSplit) split; TableRecordReader trr = this.tableRecordReader; Index: src/java/org/apache/hadoop/hbase/mapred/TableMap.java =================================================================== --- src/java/org/apache/hadoop/hbase/mapred/TableMap.java (revision 736069) +++ src/java/org/apache/hadoop/hbase/mapred/TableMap.java (working copy) @@ -32,8 +32,7 @@ * @param <K> WritableComparable key class * @param
<V> Writable value class */ -@SuppressWarnings("unchecked") -public interface TableMap<K extends WritableComparable, V extends Writable> +public interface TableMap<K extends WritableComparable<K>, V extends Writable> extends Mapper<ImmutableBytesWritable, RowResult, K, V> { }