diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
index 3794afc..0df4598 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
@@ -121,8 +121,8 @@ private static void scanAndVerify(
     HashSet<Long> primaryRowKeys = new HashSet<Long>();
     HashSet<Long> separateRowKeys = new HashSet<Long>();
     for (Tuple2<HiveKey, BytesWritable> item: output) {
-      String key = new String(item._1.getBytes());
-      String value = new String(item._2.getBytes());
+      String key = bytesWritableToString(item._1);
+      String value = bytesWritableToString(item._2);
       String prefix = key.substring(0, key.indexOf('_'));
       Long id = Long.valueOf(key.substring(5 + prefix.length()));
       if (prefix.equals(prefix1)) {
@@ -139,6 +139,18 @@ private static void scanAndVerify(
     assertEquals(primaryRows, primaryRowKeys.size());
   }
 
+  /**
+   * Convert a BytesWritable to a string.
+   * Don't use {@link BytesWritable#copyBytes()},
+   * so as to stay compatible with Hadoop 1, which does not provide it.
+   */
+  private static String bytesWritableToString(BytesWritable bw) {
+    int size = bw.getLength();
+    byte[] bytes = new byte[size];
+    System.arraycopy(bw.getBytes(), 0, bytes, 0, size);
+    return new String(bytes);
+  }
+
   private static class MyHiveFunctionResultList extends HiveBaseFunctionResultList {
     private static final long serialVersionUID = -1L;
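
For context, here is a minimal, hypothetical sketch (not part of the patch; the class name is made up) of why the old `new String(item._1.getBytes())` was unsafe and what the new helper does instead. BytesWritable.getBytes() returns the entire backing array, which can be longer than the valid payload when the writable is reused, so decoding it directly can pick up stale trailing bytes. Copying only the first getLength() bytes avoids that, and unlike copyBytes() it works on both Hadoop 1 and Hadoop 2:

    import org.apache.hadoop.io.BytesWritable;

    public class BytesWritableStringDemo {
      public static void main(String[] args) {
        BytesWritable bw = new BytesWritable("hello".getBytes());
        // Reuse the writable for a shorter payload; the backing array keeps its old tail.
        bw.set("hi".getBytes(), 0, 2);

        // Unsafe: getBytes() exposes the full backing array, so stale bytes leak in.
        System.out.println(new String(bw.getBytes()));  // likely "hillo", not "hi"

        // Safe and Hadoop 1-compatible: copy only the first getLength() bytes,
        // exactly as bytesWritableToString() in the patch does.
        byte[] valid = new byte[bw.getLength()];
        System.arraycopy(bw.getBytes(), 0, valid, 0, bw.getLength());
        System.out.println(new String(valid));          // "hi"
      }
    }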