Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java (working copy)
@@ -26,12 +26,11 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.RemoteException;
 
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -187,7 +186,7 @@
         break;
       }
       boolean found = false;
-      for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: values.entrySet()) {
         HStoreKey key = (HStoreKey) e.getKey();
         if (key.getColumn().equals(COL_REGIONINFO)) {
           info = (HRegionInfo) Writables.getWritable(
@@ -275,7 +274,7 @@
           break;
         }
         valuesfound += 1;
-        for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
           HStoreKey key = (HStoreKey) e.getKey();
           if (key.getColumn().equals(COL_REGIONINFO)) {
             info = (HRegionInfo) Writables.getWritable(
@@ -375,7 +374,7 @@
           break;
         }
         valuesfound += 1;
-        for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
           HStoreKey key = (HStoreKey) e.getKey();
           if (key.getColumn().equals(COL_REGIONINFO)) {
             info = (HRegionInfo) Writables.getWritable(
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java (working copy)
@@ -33,12 +33,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.hbase.util.Writables;
 
 /**
@@ -254,7 +253,7 @@
         if (values == null || values.size() == 0) {
           break;
         }
-        for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
           HStoreKey key = (HStoreKey) e.getKey();
           if (key.getColumn().equals(COL_REGIONINFO)) {
             info = (HRegionInfo) Writables.getWritable(
@@ -686,7 +685,7 @@
       }
       SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-      for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: values.entrySet()) {
         HStoreKey key = (HStoreKey) e.getKey();
         results.put(key.getColumn(),
           ((ImmutableBytesWritable) e.getValue()).get());
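The recurring edit in the hunks above, and in the files that follow, is mechanical: org.apache.hadoop.io.MapWritable implements Map<Writable, Writable>, while the deleted org.apache.hadoop.hbase.io.MapWritable was keyed on WritableComparable, so every entrySet() loop retypes its loop variable. A minimal sketch of the resulting read pattern, assuming the HStoreKey/ImmutableBytesWritable layout the patched loops use; the class and method names here are illustrative, not part of the patch:

    import java.util.Map;

    import org.apache.hadoop.hbase.HStoreKey;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Writable;

    public class EntryIterationSketch {
      /** Dump a scanner/getRow result: keys arrive statically typed as
          Writable, so callers downcast to HStoreKey, as the patched loops do. */
      static void dump(MapWritable values) {
        for (Map.Entry<Writable, Writable> e : values.entrySet()) {
          HStoreKey key = (HStoreKey) e.getKey();
          byte[] cell = ((ImmutableBytesWritable) e.getValue()).get();
          System.out.println(key.getRow() + "/" + key.getColumn() + ": "
              + cell.length + " bytes");
        }
      }
    }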
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (working copy)
@@ -47,12 +47,9 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
-import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.Server;
@@ -58,6 +55,8 @@
 import org.apache.hadoop.ipc.Server;
 
 import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Writables;
 
 /**
@@ -209,7 +208,7 @@
             break;
           }
-          for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+          for (Map.Entry<Writable, Writable> e: values.entrySet()) {
            HStoreKey key = (HStoreKey) e.getKey();
            results.put(key.getColumn(),
              ((ImmutableBytesWritable) e.getValue()).get());
@@ -1730,7 +1729,7 @@
       SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
       Text row = null;
-      for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: values.entrySet()) {
         HStoreKey key = (HStoreKey) e.getKey();
         Text thisRow = key.getRow();
         if (row == null) {
@@ -2406,7 +2405,7 @@
       // be inserted if it exists so look for exact match on table name.
       if (data != null && data.size() > 0) {
-        for (WritableComparable k: data.keySet()) {
+        for (Writable k: data.keySet()) {
           if (HRegionInfo.getTableNameFromRegionName(
               ((HStoreKey) k).getRow()).equals(tableName)) {
@@ -2553,8 +2552,7 @@
           break;
         }
         boolean haveRegionInfo = false;
-        for (Map.Entry<WritableComparable, Writable> e:
-            values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
           byte[] value = ((ImmutableBytesWritable) e.getValue()).get();
           if (value == null || value.length == 0) {
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java (working copy)
@@ -23,7 +23,8 @@
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.MapWritable;
+
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.VersionedProtocol;
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (working copy)
@@ -40,9 +40,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
@@ -53,7 +52,6 @@
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.hbase.util.Writables;
 
 /*******************************************************************************
@@ -1034,10 +1032,7 @@
     throws IOException {
     requestCount.incrementAndGet();
     HRegion region = getRegion(regionName);
-    MapWritable result = new MapWritable(HStoreKey.class,
-        ImmutableBytesWritable.class,
-        new TreeMap());
-
+    MapWritable result = new MapWritable();
     TreeMap<Text, byte[]> map = region.getFull(row);
     for (Map.Entry<Text, byte[]> es: map.entrySet()) {
       result.put(new HStoreKey(row, es.getKey()),
@@ -1059,9 +1054,7 @@
 
     // Collect values to be returned here
-    MapWritable values = new MapWritable(HStoreKey.class,
-        ImmutableBytesWritable.class,
-        new TreeMap());
+    MapWritable values = new MapWritable();
 
     // Keep getting rows until we find one that has at least one non-deleted column value
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (working copy)
@@ -35,10 +35,10 @@
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
+
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.ipc.RemoteException;
 
 /**
@@ -361,7 +361,7 @@
     }
     SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
     if (value != null && value.size() != 0) {
-      for (Map.Entry<WritableComparable, Writable> e: value.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: value.entrySet()) {
        HStoreKey key = (HStoreKey) e.getKey();
        results.put(key.getColumn(),
          ((ImmutableBytesWritable) e.getValue()).get());
@@ -764,7 +764,7 @@
       } while (values != null && values.size() == 0 && nextScanner());
       if (values != null && values.size() != 0) {
-        for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
          HStoreKey k = (HStoreKey) e.getKey();
          key.setRow(k.getRow());
          key.setVersion(k.getTimestamp());
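The HRegionServer hunks above show the other half of the migration: results are now assembled into a default-constructed MapWritable instead of one pre-typed through the removed three-argument hbase constructor. A sketch of the getRow()-style assembly under the same assumptions as before (GetRowSketch and toResult are illustrative names):

    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.hbase.HStoreKey;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;

    public class GetRowSketch {
      /** Wrap a column-to-cell map for the wire: the map starts empty and each
          entry is typed as it is put, so no key/value classes are declared up front. */
      static MapWritable toResult(Text row, TreeMap<Text, byte[]> columns) {
        MapWritable result = new MapWritable();
        for (Map.Entry<Text, byte[]> es : columns.entrySet()) {
          result.put(new HStoreKey(row, es.getKey()),
              new ImmutableBytesWritable(es.getValue()));
        }
        return result;
      }
    }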
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/MapWritable.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/MapWritable.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/MapWritable.java (working copy)
@@ -1,303 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.io;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.util.TreeMap;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HConstants;
-
-/**
- *
- */
-public class MapWritable implements Writable, Map<WritableComparable, Writable> {
-  private String keyClass = null;
-  private String valueClass = null;
-  private String mapClass = null;
-  private Map<WritableComparable, Writable> instance = null;
-
-  /**
-   * Default constructor used by writable
-   */
-  public MapWritable() {}
-
-  /**
-   * @param keyClass the class of the keys
-   * @param valueClass the class of the values
-   * @param instance the Map to be wrapped in this Writable
-   */
-  @SuppressWarnings("unchecked")
-  public MapWritable(Class<? extends WritableComparable> keyClass,
-      Class<? extends Writable> valueClass,
-      Map<WritableComparable, Writable> instance) {
-
-    this.keyClass = keyClass.getName();
-    this.valueClass = valueClass.getName();
-    this.instance = instance;
-    this.mapClass = instance.getClass().getName();
-  }
-
-  private void checkInitialized() {
-    if (keyClass == null ||
-        valueClass == null ||
-        mapClass == null ||
-        instance == null) {
-
-      throw new IllegalStateException("object has not been properly initialized");
-    }
-  }
-
-  /** {@inheritDoc} */
-  public void clear() {
-    checkInitialized();
-    instance.clear();
-  }
-
-  /** {@inheritDoc} */
-  public boolean containsKey(Object key) {
-    checkInitialized();
-    return instance.containsKey(key);
-  }
-
-  /** {@inheritDoc} */
-  public boolean containsValue(Object value) {
-    checkInitialized();
-    return instance.containsValue(value);
-  }
-
-  /** {@inheritDoc} */
-  public Set<Map.Entry<WritableComparable, Writable>> entrySet() {
-    checkInitialized();
-    return instance.entrySet();
-  }
-
-  /** {@inheritDoc} */
-  public Writable get(Object key) {
-    checkInitialized();
-    return instance.get(key);
-  }
-
-  /**
-   * Returns the value to which this map maps the specified key
-   * @param key
-   * @return value associated with specified key
-   */
-  public Writable get(WritableComparable key) {
-    checkInitialized();
-    return instance.get(key);
-  }
-
-  /** {@inheritDoc} */
-  public boolean isEmpty() {
-    checkInitialized();
-    return instance.isEmpty();
-  }
-
-  /** {@inheritDoc} */
-  public Set<WritableComparable> keySet() {
-    checkInitialized();
-    return instance.keySet();
-  }
-
-  /** {@inheritDoc} */
-  public Writable put(WritableComparable key, Writable value) {
-    checkInitialized();
-    return instance.put(key, value);
-  }
-
-  /** {@inheritDoc} */
-  public void putAll(Map<? extends WritableComparable, ? extends Writable> t) {
-    checkInitialized();
-    instance.putAll(t);
-  }
-
-  /** {@inheritDoc} */
-  public Writable remove(Object key) {
-    checkInitialized();
-    return instance.remove(key);
-  }
-
-  /**
-   * Removes the mapping for this key from this map if it is present
-   * @param key
-   * @return value corresponding to key
-   */
-  public Writable remove(WritableComparable key) {
-    checkInitialized();
-    return instance.remove(key);
-  }
-
-  /** {@inheritDoc} */
-  public int size() {
-    checkInitialized();
-    return instance.size();
-  }
-
-  /** {@inheritDoc} */
-  public Collection<Writable> values() {
-    checkInitialized();
-    return instance.values();
-  }
-
-  // Writable
-
-  /** {@inheritDoc} */
-  public void write(DataOutput out) throws IOException {
-    checkInitialized();
-    out.writeUTF(mapClass);
-    out.writeUTF(keyClass);
-    out.writeUTF(valueClass);
-    out.writeInt(instance.size());
-
-    for (Map.Entry<WritableComparable, Writable> e: instance.entrySet()) {
-      e.getKey().write(out);
-      e.getValue().write(out);
-    }
-  }
-
-  /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
-  public void readFields(DataInput in) throws IOException {
-    mapClass = in.readUTF();
-    keyClass = in.readUTF();
-    valueClass = in.readUTF();
-
-    instance = (Map<WritableComparable, Writable>) objectFactory(mapClass);
-
-    int entries = in.readInt();
-    for (int i = 0; i < entries; i++) {
-      WritableComparable key = (WritableComparable) objectFactory(keyClass);
-      key.readFields(in);
-
-      Writable value = (Writable) objectFactory(valueClass);
-      value.readFields(in);
-
-      instance.put(key, value);
-    }
-  }
-
-  private Object objectFactory(String className) throws IOException {
-    Object o = null;
-    String exceptionMessage = null;
-    try {
-      o = Class.forName(className).newInstance();
-
-    } catch (ClassNotFoundException e) {
-      exceptionMessage = e.getMessage();
-
-    } catch (InstantiationException e) {
-      exceptionMessage = e.getMessage();
-
-    } catch (IllegalAccessException e) {
-      exceptionMessage = e.getMessage();
-
-    } finally {
-      if (exceptionMessage != null) {
-        throw new IOException("error instantiating " + className + " because " +
-            exceptionMessage);
-      }
-    }
-    return o;
-  }
-
-  /**
-   * A simple main program to test this class.
-   *
-   * @param args not used
-   * @throws IOException
-   */
-  @SuppressWarnings("unchecked")
-  public static void main(@SuppressWarnings("unused") String[] args)
-      throws IOException {
-
-    HStoreKey[] keys = {
-        new HStoreKey(new Text("row1"), HConstants.COL_REGIONINFO),
-        new HStoreKey(new Text("row2"), HConstants.COL_SERVER),
-        new HStoreKey(new Text("row3"), HConstants.COL_STARTCODE)
-    };
-
-    ImmutableBytesWritable[] values = {
-        new ImmutableBytesWritable("value1".getBytes()),
-        new ImmutableBytesWritable("value2".getBytes()),
-        new ImmutableBytesWritable("value3".getBytes())
-    };
-
-    @SuppressWarnings("unchecked")
-    MapWritable inMap = new MapWritable(HStoreKey.class,
-        ImmutableBytesWritable.class,
-        (Map) new TreeMap());
-
-    for (int i = 0; i < keys.length; i++) {
-      inMap.put(keys[i], values[i]);
-    }
-
-    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
-    DataOutput out = new DataOutputStream(bytes);
-    try {
-      inMap.write(out);
-
-    } catch (IOException e) {
-      e.printStackTrace();
-      throw e;
-    }
-
-    MapWritable outMap = new MapWritable();
-    DataInput in =
-      new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
-
-    try {
-      outMap.readFields(in);
-
-    } catch (IOException e) {
-      e.printStackTrace();
-      throw e;
-    }
-
-    if (outMap.size() != inMap.size()) {
-      System.err.println("outMap.size()=" + outMap.size() + " != " +
-          "inMap.size()=" + inMap.size());
-    }
-
-    for (Map.Entry<WritableComparable, Writable> e: inMap.entrySet()) {
-      if (!outMap.containsKey(e.getKey())) {
-        System.err.println("outMap does not contain key " + e.getKey().toString());
-        continue;
-      }
-      if (((WritableComparable) outMap.get(e.getKey())).compareTo(
-          e.getValue()) != 0) {
-        System.err.println("output value for " + e.getKey().toString() + " != input value");
-      }
-    }
-    System.out.println("it worked!");
-  }
-}
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (working copy)
@@ -25,11 +25,10 @@
 
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 
@@ -88,7 +87,7 @@
    * Pass the new key and value to reduce.
    * If any of the grouping columns are not found in the value, the record is skipped.
    *
-   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.hbase.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
+   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
    */
   @Override
   public void map(@SuppressWarnings("unused") HStoreKey key,
@@ -116,7 +115,7 @@
     ArrayList<byte[]> foundList = new ArrayList<byte[]>();
     int numCols = m_columns.length;
     if(numCols > 0) {
-      for (Map.Entry<WritableComparable, Writable> e: r.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: r.entrySet()) {
         Text column = (Text) e.getKey();
         for (int i = 0; i < numCols; i++) {
           if (column.equals(m_columns[i])) {
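Why the class above can be deleted outright: it serialized by writing three class names up front (writeUTF of the map, key, and value class), which forced every key and every value in a map to share a single class and re-sent those names with each serialized map. Hadoop's org.apache.hadoop.io.MapWritable, via AbstractMapWritable, instead writes a compact class-to-id table and tags each key and value individually, so the no-arg constructor suffices and mixed value types are legal. A self-contained round trip, analogous to the deleted main() above (RoundTripSketch is an illustrative name):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;

    public class RoundTripSketch {
      public static void main(String[] args) throws IOException {
        MapWritable in = new MapWritable();
        in.put(new Text("row1"), new Text("value1"));
        in.put(new Text("row2"), new IntWritable(2)); // mixed value types are fine

        // Serialization emits the class table plus per-entry type ids,
        // so no constructor-time typing is needed.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bytes));

        MapWritable out = new MapWritable();
        out.readFields(new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(out.get(new Text("row1"))); // prints: value1
        System.out.println(out.get(new Text("row2"))); // prints: 2
      }
    }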
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (working copy)
@@ -22,7 +22,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.io.MapWritable;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Reporter;
 
@@ -40,7 +40,7 @@
   /**
    * Pass the key, value to reduce
    *
-   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.hbase.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
+   * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
    */
   @Override
   public void map(HStoreKey key, MapWritable value,
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (working copy)
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.hadoop.hbase.io.MapWritable;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Reporter;
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (working copy)
@@ -26,6 +26,7 @@
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 
 import org.apache.hadoop.mapred.InputFormat;
@@ -39,7 +40,6 @@
 import org.apache.hadoop.hbase.HScannerInterface;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 
 import org.apache.log4j.Logger;
 
@@ -107,9 +107,7 @@
    */
   @SuppressWarnings("unchecked")
   public MapWritable createValue() {
-    return new MapWritable((Class) Text.class,
-      (Class) ImmutableBytesWritable.class,
-      (Map) new TreeMap());
+    return new MapWritable();
   }
 
   /** {@inheritDoc} */
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (working copy)
@@ -21,6 +21,8 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
@@ -28,10 +30,8 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
 
 /**
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputCollector.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputCollector.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputCollector.java (working copy)
@@ -21,11 +21,10 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.OutputCollector;
 
-import org.apache.hadoop.hbase.io.MapWritable;
-
 /**
  * Refine the types that can be collected from a Table Map/Reduce jobs.
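With TableMap and TableOutputCollector now compiled against the hadoop MapWritable, a user mapper changes only its import; the override keeps the signature documented in the @see tags above. A sketch of an identity-style mapper under that assumption (PassThroughMap is an illustrative name, and the collect call mirrors what IdentityTableMap plausibly does rather than quoting it):

    import java.io.IOException;

    import org.apache.hadoop.hbase.HStoreKey;
    import org.apache.hadoop.hbase.mapred.TableMap;
    import org.apache.hadoop.hbase.mapred.TableOutputCollector;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.mapred.Reporter;

    public class PassThroughMap extends TableMap {
      /** Emit each row unchanged: row key out, full column map as the value. */
      @Override
      public void map(HStoreKey key, MapWritable value,
          TableOutputCollector output, Reporter reporter) throws IOException {
        output.collect(key.getRow(), value);
      }
    }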
 */
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (revision 571334)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (working copy)
@@ -23,9 +23,9 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapred.JobConf;
@@ -36,7 +36,6 @@
 
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 
 import org.apache.log4j.Logger;
 
@@ -82,7 +81,7 @@
 
       long xid = m_table.startUpdate(key);
 
-      for (Map.Entry<WritableComparable, Writable> e: value.entrySet()) {
+      for (Map.Entry<Writable, Writable> e: value.entrySet()) {
         m_table.put(xid, (Text)e.getKey(),
           ((ImmutableBytesWritable)e.getValue()).get());
       }
Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java
===================================================================
--- src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java (revision 571334)
+++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java (working copy)
@@ -32,8 +32,9 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.hbase.filter.RegExpRowFilter;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
@@ -40,9 +41,7 @@
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Text;
 
 /**
  * Additional scanner tests.
@@ -219,7 +218,7 @@
           break;
         }
 
-        for (Map.Entry<WritableComparable, Writable> e: values.entrySet()) {
+        for (Map.Entry<Writable, Writable> e: values.entrySet()) {
           HStoreKey k = (HStoreKey) e.getKey();
           results.put(k.getColumn(),
             ((ImmutableBytesWritable) e.getValue()).get());
Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java
===================================================================
--- src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java (revision 571334)
+++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java (working copy)
@@ -29,6 +29,7 @@
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -35,9 +36,9 @@
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.MapWritable;
 import org.apache.hadoop.hbase.mapred.TableMap;
 import org.apache.hadoop.hbase.mapred.TableOutputCollector;
+import org.apache.hadoop.hbase.mapred.TableReduce;
 import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
 
 /**
@@ -120,7 +121,7 @@
     /**
      * Pass the key, and reversed value to reduce
      *
-     * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.hbase.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
+     * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.hbase.mapred.TableOutputCollector, org.apache.hadoop.mapred.Reporter)
      */
     @SuppressWarnings("unchecked")
     @Override
@@ -151,9 +152,7 @@
 
       // Now set the value to be collected
 
-      MapWritable outval = new MapWritable((Class) Text.class,
-        (Class) ImmutableBytesWritable.class,
-        (Map) new TreeMap());
+      MapWritable outval = new MapWritable();
       outval.put(TEXT_OUTPUT_COLUMN,
         new ImmutableBytesWritable(newValue.toString().getBytes()));
 
@@ -163,6 +162,7 @@
 
   /**
    * Test hbase mapreduce jobs against single region and multi-region tables.
+   * @throws IOException
    */
   public void testTableMapReduce() throws IOException {
     localTestSingleRegionTable();
@@ -214,7 +214,7 @@
       TableMap.initJob(SINGLE_REGION_TABLE_NAME, INPUT_COLUMN,
         ProcessContentsMapper.class, jobConf);
 
-      IdentityTableReduce.initJob(SINGLE_REGION_TABLE_NAME,
+      TableReduce.initJob(SINGLE_REGION_TABLE_NAME,
         IdentityTableReduce.class, jobConf);
 
       JobClient.runJob(jobConf);
@@ -264,7 +264,7 @@
       TableMap.initJob(MULTI_REGION_TABLE_NAME, INPUT_COLUMN,
         ProcessContentsMapper.class, jobConf);
 
-      IdentityTableReduce.initJob(MULTI_REGION_TABLE_NAME,
+      TableReduce.initJob(MULTI_REGION_TABLE_NAME,
        IdentityTableReduce.class, jobConf);
 
      JobClient.runJob(jobConf);
@@ -306,6 +306,7 @@
     }
   }
 
+  @SuppressWarnings("null")
   private void verify(Configuration conf, String tableName) throws IOException {
     HTable table = new HTable(conf, new Text(tableName));
 
@@ -334,6 +335,8 @@
     }
 
     // verify second value is the reverse of the first
+    assertNotNull(firstValue);
+    assertNotNull(secondValue);
     assertEquals(firstValue.length, secondValue.length);
     for (int i=0; i<firstValue.length; i++) {
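One behavioral note on the test change above: initJob is a static helper declared on TableReduce, so the old spelling IdentityTableReduce.initJob(...) merely reached it through the subclass; the patch names the declaring class, which is the clearer form. A sketch of the resulting job wiring, with hypothetical table and column names ("mytable", "contents:") standing in for the test's constants:

    import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
    import org.apache.hadoop.hbase.mapred.TableMap;
    import org.apache.hadoop.hbase.mapred.TableReduce;
    import org.apache.hadoop.mapred.JobConf;

    public class JobWiringSketch {
      /** Configure a table-in/table-out job: mapper scans "contents:" from
          "mytable"; the identity reducer writes the mapper output back. */
      static void wire(JobConf jobConf, Class<? extends TableMap> mapper) {
        TableMap.initJob("mytable", "contents:", mapper, jobConf);
        TableReduce.initJob("mytable", IdentityTableReduce.class, jobConf);
      }
    }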