Index: hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java	(date 1555524830000)
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java	(date 1555569322000)
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
Index: hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java	(date 1555524830000)
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableSnapshotInputFormat.java	(date 1555569322000)
@@ -23,11 +23,9 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapred.TableInputFormat;
 import org.apache.hadoop.hbase.mapred.TableSnapshotInputFormat;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
@@ -36,7 +34,6 @@
 import org.apache.hadoop.mapred.Reporter;
 
 import java.io.IOException;
-import java.util.List;
 
 public class HiveHBaseTableSnapshotInputFormat implements InputFormat {
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java	(date 1555524830000)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java	(date 1555569322000)
@@ -21,7 +21,6 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
Index: serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java	(date 1555524830000)
+++ serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java	(date 1555569322000)
@@ -54,7 +54,7 @@
   // todo this should be configured in serde
   public static byte[] decodeIfNeeded(byte[] recv) {
-    boolean arrayByteBase64 = Base64.isArrayByteBase64(recv);
+    boolean arrayByteBase64 = Base64.isBase64(recv);
     if (LOG.isDebugEnabled() && arrayByteBase64) {
       LOG.debug("Data only contains Base64 alphabets only so try to decode the data.");
     }
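
Note on the one functional change above: LazyBinary.decodeIfNeeded() switches from Base64.isArrayByteBase64(byte[]) to Base64.isBase64(byte[]). In Apache Commons Codec, isArrayByteBase64 has been deprecated since 1.5 and delegates to isBase64, so the switch should be behavior-preserving while dropping the deprecated call. Below is a minimal standalone sketch of the replacement call; the class name and sample inputs are made up for illustration and are not part of the patch.

import org.apache.commons.codec.binary.Base64;

public class Base64CheckSketch {
  public static void main(String[] args) {
    byte[] encoded = "SGl2ZQ==".getBytes();       // "Hive", Base64-encoded
    byte[] notEncoded = "plain-text!".getBytes();

    // isBase64(byte[]) returns true only when every byte is in the Base64
    // alphabet (pad '=' and whitespace are accepted) -- the same contract
    // the deprecated isArrayByteBase64(byte[]) exposed.
    System.out.println(Base64.isBase64(encoded));    // true
    System.out.println(Base64.isBase64(notEncoded)); // false ('!' is not in the alphabet)

    // Mirrors the decode-if-needed pattern in LazyBinary: only decode when
    // the payload looks like Base64 data.
    byte[] decoded = Base64.isBase64(encoded) ? Base64.decodeBase64(encoded) : encoded;
    System.out.println(new String(decoded));         // Hive
  }
}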