diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 156f755..7b17373 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,11 +50,11 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -319,14 +320,7 @@ public static MetaData readMetaData(FileSystem fs, Path metadataPath)
   public static String readAsString(final FileSystem fs, final Path fromMetadataPath)
       throws IOException {
     try (FSDataInputStream stream = fs.open(fromMetadataPath)) {
-      byte[] buffer = new byte[1024];
-      ByteArrayOutputStream sb = new ByteArrayOutputStream();
-      int read = stream.read(buffer);
-      while (read != -1) {
-        sb.write(buffer, 0, read);
-        read = stream.read(buffer);
-      }
-      return new String(sb.toByteArray(), "UTF-8");
+      return IOUtils.toString(stream, StandardCharsets.UTF_8);
     }
   }
 
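
Note (not part of the patch): a minimal, self-contained sketch of the behaviour the new call relies on. IOUtils.toString(InputStream, Charset) from commons-io reads the stream to end-of-file and decodes the bytes with the given charset, which is what the removed byte[]/ByteArrayOutputStream loop did by hand. The class name and the ByteArrayInputStream input below are hypothetical stand-ins for the FSDataInputStream that readAsString opens from HDFS.

import org.apache.commons.io.IOUtils;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical demo class, not part of EximUtil.
public class ReadAsStringSketch {
  public static void main(String[] args) throws IOException {
    byte[] metadata = "{\"version\":1}".getBytes(StandardCharsets.UTF_8);
    // try-with-resources mirrors readAsString's try (FSDataInputStream stream = fs.open(...)).
    try (InputStream stream = new ByteArrayInputStream(metadata)) {
      // Drains the stream to EOF and decodes it as UTF-8 in a single call,
      // replacing the manual 1024-byte buffer loop that the patch removes.
      String contents = IOUtils.toString(stream, StandardCharsets.UTF_8);
      System.out.println(contents); // prints {"version":1}
    }
  }
}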