diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
index f01748c0f1..79d687faf7 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseScanRange.java
@@ -87,6 +87,7 @@ public void setup(Scan scan, Configuration conf, boolean filterOnly) throws Exce
     scan.setFilter(new FilterList(filters));
   }
 
+  @Override
   public String toString() {
     return (startRow == null ? "" : new BytesWritable(startRow).toString()) + " ~ " +
         (stopRow == null ? "" : new BytesWritable(stopRow).toString());
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index 1588283f40..5147d0e3c9 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index eb3560caaa..480484c6bb 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -167,6 +167,7 @@ public TypeInfo getTypeForName(String columnName) {
     throw new IllegalArgumentException("Invalid column name " + columnName);
   }
 
+  @Override
   public String toString() {
     return "[" + columnMappingString + ":" + getColumnNames() + ":" + getColumnTypes() + "]";
   }
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index f3735a3cb5..16658d0392 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -42,7 +42,6 @@
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
index b344e16c13..f0eccd72ed 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
@@ -126,10 +126,12 @@ public MyRecordWriter(BufferedMutator table, Connection connection, boolean walE
       m_connection = connection;
     }
 
+    @Override
     public void close(Reporter reporter) throws IOException {
       m_table.close();
     }
 
+    @Override
     public void write(ImmutableBytesWritable key, Object value) throws IOException {
       Put put;
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java
index cd0a9908e0..2d3f34908a 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java
@@ -22,11 +22,9 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapred.TableInputFormat;
 import org.apache.hadoop.hbase.mapred.TableSnapshotInputFormat;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
@@ -36,7 +34,6 @@
 
 import java.io.IOException;
 import java.util.Base64;
-import java.util.List;
 
 public class HiveHBaseTableSnapshotInputFormat implements InputFormat {
 
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
index 9615da5170..ffb2aa3583 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
@@ -26,7 +26,6 @@
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroLazyObjectInspector;
 import org.apache.hadoop.hive.serde2.avro.AvroSchemaRetriever;
-import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
index 6e08d4e3fb..52b9f786e8 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
@@ -28,7 +28,6 @@
 import java.util.Properties;
 
 import org.junit.Assert;
-import junit.framework.TestCase;
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData;
@@ -556,15 +555,15 @@ public void testHBaseSerDeWithHiveMapToHBaseColumnFamily() throws SerDeException
 
     byte [][][] columnQualifiersAndValues = new byte [][][] {
         {Bytes.toBytes(1), new byte [] {1}, Bytes.toBytes((short) 1),
-          Bytes.toBytes((long) 1), Bytes.toBytes((float) 1.0F), Bytes.toBytes(1.0),
+          Bytes.toBytes((long) 1), Bytes.toBytes(1.0F), Bytes.toBytes(1.0),
           Bytes.toBytes(true)},
         {Bytes.toBytes(Integer.MIN_VALUE), new byte [] {Byte.MIN_VALUE},
-          Bytes.toBytes((short) Short.MIN_VALUE), Bytes.toBytes((long) Long.MIN_VALUE),
-          Bytes.toBytes((float) Float.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE),
+          Bytes.toBytes(Short.MIN_VALUE), Bytes.toBytes(Long.MIN_VALUE),
+          Bytes.toBytes(Float.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE),
           Bytes.toBytes(false)},
         {Bytes.toBytes(Integer.MAX_VALUE), new byte [] {Byte.MAX_VALUE},
-          Bytes.toBytes((short) Short.MAX_VALUE), Bytes.toBytes((long) Long.MAX_VALUE),
-          Bytes.toBytes((float) Float.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE),
+          Bytes.toBytes(Short.MAX_VALUE), Bytes.toBytes(Long.MAX_VALUE),
+          Bytes.toBytes(Float.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE),
           Bytes.toBytes(true)}
     };
 
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java
index 5d6302bf15..4e5898e35f 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java
@@ -23,8 +23,6 @@
 import java.util.Collections;
 import java.util.List;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
@@ -360,8 +358,8 @@ public void testLazyHBaseCellMap3() throws SerDeException {
     hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
     byte [] cfFloat = "cf-float".getBytes();
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) 1.0F),
-        Bytes.toBytes((float) 1.0F)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes(1.0F),
+        Bytes.toBytes(1.0F)));
     result = Result.create(kvs);
     hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
     FloatWritable expectedFloatValue = new FloatWritable(1.0F);
@@ -371,8 +369,8 @@ public void testLazyHBaseCellMap3() throws SerDeException {
     assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
 
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MIN_VALUE),
-        Bytes.toBytes((float) Float.MIN_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes(Float.MIN_VALUE),
+        Bytes.toBytes(Float.MIN_VALUE)));
     result = Result.create(kvs);
     hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
     expectedFloatValue = new FloatWritable(Float.MIN_VALUE);
@@ -382,8 +380,8 @@ public void testLazyHBaseCellMap3() throws SerDeException {
     assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
 
     kvs.clear();
-    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MAX_VALUE),
-        Bytes.toBytes((float) Float.MAX_VALUE)));
+    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes(Float.MAX_VALUE),
+        Bytes.toBytes(Float.MAX_VALUE)));
     result = Result.create(kvs);
     hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
     expectedFloatValue = new FloatWritable(Float.MAX_VALUE);
@@ -772,11 +770,11 @@ public void testLazyHBaseRow3() throws SerDeException {
         break;
 
       case 5:
-        value = Bytes.toBytes((float) 1.0F);
+        value = Bytes.toBytes(1.0F);
         break;
 
       case 6:
-        value = Bytes.toBytes((double) 1.0);
+        value = Bytes.toBytes(1.0);
         break;
 
       case 7:
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Address.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Address.java
index e9145c74eb..e18b3218c6 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Address.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Address.java
@@ -37,8 +37,10 @@ public Address(java.lang.CharSequence address1, java.lang.CharSequence address2,
     this.metadata = metadata;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter. Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return address1;
@@ -52,6 +54,7 @@ public Address(java.lang.CharSequence address1, java.lang.CharSequence address2,
     }
   }
   // Used by DatumReader. Applications should not call.
+  @Override
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
@@ -59,7 +62,7 @@ public void put(int field$, java.lang.Object value$) {
     case 1: address2 = (java.lang.CharSequence)value$; break;
     case 2: city = (java.lang.CharSequence)value$; break;
     case 3: zipcode = (java.lang.Long)value$; break;
-    case 4: county = (java.lang.Object)value$; break;
+    case 4: county = value$; break;
     case 5: aliases = (java.util.List)value$; break;
     case 6: metadata = (java.util.Map)value$; break;
     default: throw new org.apache.avro.AvroRuntimeException("Bad index");
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/ContactInfo.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/ContactInfo.java
index a19a937207..d5075ded0c 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/ContactInfo.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/ContactInfo.java
@@ -29,8 +29,10 @@ public ContactInfo(java.util.List add
     this.officePhone = officePhone;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter. Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return address;
@@ -40,6 +42,7 @@ public ContactInfo(java.util.List add
     }
   }
   // Used by DatumReader. Applications should not call.
+  @Override
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Employee.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Employee.java
index d1d5cb24af..627298c9f7 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Employee.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/Employee.java
@@ -33,8 +33,10 @@ public Employee(java.lang.CharSequence employeeName, java.lang.Long employeeID,
     this.contactInfo = contactInfo;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter. Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return employeeName;
@@ -46,6 +48,7 @@ public Employee(java.lang.CharSequence employeeName, java.lang.Long employeeID,
     }
   }
   // Used by DatumReader. Applications should not call.
+  @Override
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/HomePhone.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/HomePhone.java
index d922bd724c..f2901b33ff 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/HomePhone.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/HomePhone.java
@@ -27,8 +27,10 @@ public HomePhone(java.lang.Long areaCode, java.lang.Long number) {
     this.number = number;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter. Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return areaCode;
@@ -37,6 +39,7 @@ public HomePhone(java.lang.Long areaCode, java.lang.Long number) {
     }
   }
   // Used by DatumReader. Applications should not call.
+  @Override
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
diff --git a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/OfficePhone.java b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/OfficePhone.java
index 26c9df057a..d5fc33bf8b 100644
--- a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/OfficePhone.java
+++ b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/avro/OfficePhone.java
@@ -27,8 +27,10 @@ public OfficePhone(java.lang.Long areaCode, java.lang.Long number) {
     this.number = number;
   }
 
+  @Override
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
   // Used by DatumWriter. Applications should not call.
+  @Override
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return areaCode;
@@ -37,6 +39,7 @@ public OfficePhone(java.lang.Long areaCode, java.lang.Long number) {
     }
   }
   // Used by DatumReader. Applications should not call.
+  @Override
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {