diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
index 51746b0ce4..93c792d979 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -34,7 +34,6 @@
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hive.hplsql.executor.QueryExecutor;
 import org.apache.hive.hplsql.executor.QueryResult;
@@ -179,9 +178,9 @@ void copyToFile(HplsqlParser.Copy_stmtContext ctx, QueryResult query) throws Exc
     else {
       filename = ctx.copy_target().getText();
     }
-    byte[] del = DFSUtil.string2Bytes(delimiter);
-    byte[] rowdel = DFSUtil.string2Bytes("\n");
-    byte[] nullstr = DFSUtil.string2Bytes("NULL");
+    byte[] del = delimiter.getBytes();
+    byte[] rowdel = "\n".getBytes();
+    byte[] nullstr = "NULL".getBytes();
     int cols = query.columnCount();
     int rows = 0;
     long bytes = 0;
@@ -234,7 +233,7 @@ void copyToFile(HplsqlParser.Copy_stmtContext ctx, QueryResult query) throws Exc
         if (sqlInsert) {
          col = Utils.quoteString(col);
         }
-        byte[] b = DFSUtil.string2Bytes(col);
+        byte[] b = col.getBytes();
         out.write(b);
         bytes += b.length;
       }
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/serde/LazySimpleSerDeBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/serde/LazySimpleSerDeBench.java
index 3532c51a2a..5dbc571df5 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/serde/LazySimpleSerDeBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/serde/LazySimpleSerDeBench.java
@@ -20,7 +20,6 @@
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyDate;
@@ -125,8 +124,8 @@ public void setup() {
     for (int i = 0; i < sizes.length / 2; i++) {
       int p = r.nextInt(max);
       int n = -1 * (p - 1);
-      byte[] ps = DFSUtil.string2Bytes(String.format("%d", p));
-      byte[] ns = DFSUtil.string2Bytes(String.format("%d", n));
+      byte[] ps = String.format("%d", p).getBytes();
+      byte[] ns = String.format("%d", n).getBytes();
       sizes[2 * i] = ps.length;
       sizes[2 * i + 1] = ns.length;
       offsets[2 * i] = len;
@@ -498,7 +497,7 @@ public void setup() {
     for (int i = 0; i < DEFAULT_DATA_SIZE; i++) {
       // -ve dates are also valid dates - the dates are within 1959 to 2027
       Date dt = new Date(base + (Math.abs(r.nextLong()) % (Integer.MAX_VALUE*1000L)));
-      byte[] ds = DFSUtil.string2Bytes(dt.toString());
+      byte[] ds = dt.toString().getBytes();
       sizes[i] = ds.length;
       offsets[i] = len;
       len += ds.length;
@@ -581,7 +580,7 @@ public void setup() {
     for (int i = 0; i < DEFAULT_DATA_SIZE; i++) {
       // -ve dates are also valid Timestamps - dates are within 1959 to 2027
       Date dt = new Date(base + (Math.abs(r.nextLong()) % (Integer.MAX_VALUE * 1000L)));
-      byte[] ds = DFSUtil.string2Bytes(String.format("%s 00:00:01", dt.toString()));
+      byte[] ds = String.format("%s 00:00:01", dt.toString()).getBytes();
       sizes[i] = ds.length;
       offsets[i] = len;
       len += ds.length;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HostAffinitySplitLocationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HostAffinitySplitLocationProvider.java
index 664dec9278..a1d422b486 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HostAffinitySplitLocationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HostAffinitySplitLocationProvider.java
@@ -25,7 +25,6 @@
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.split.SplitLocationProvider;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hive.common.util.Murmur3;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -106,7 +105,7 @@ public static int determineLocation(
     // There is the drawback of potentially hashing the same data on multiple nodes though, when a
     // large split is sent to 1 node, and a second invocation uses smaller chunks of the previous
     // large split and send them to different nodes.
-    byte[] pathBytes = DFSUtil.string2Bytes(path);
+    byte[] pathBytes = path.getBytes();
     byte[] allBytes = new byte[pathBytes.length + 8];
     System.arraycopy(pathBytes, 0, allBytes, 0, pathBytes.length);
     SerDeUtils.writeLong(allBytes, pathBytes.length, start >> 3);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
index 8e9acef578..461711f8e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hdfs.DFSUtil;
 import java.sql.Date;
 import java.text.SimpleDateFormat;
@@ -52,7 +51,7 @@ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
   @Override
   protected void func(BytesColumnVector outV, long[] vector, int i) {
     dt.setTime(DateWritableV2.daysToMillis((int) vector[i]));
-    byte[] temp = DFSUtil.string2Bytes(formatter.format(dt));
+    byte[] temp = formatter.format(dt).getBytes();
     assign(outV, i, temp, temp.length);
   }
@@ -68,7 +67,7 @@ void sqlFormat(BytesColumnVector outV, long[] vector, int i,
       outV.noNulls = false;
       return;
     }
-    byte[] temp = DFSUtil.string2Bytes(formattedDate);
+    byte[] temp = formattedDate.getBytes();
     assign(outV, i, temp, temp.length);
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
index f9fd8dd720..04623341ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hdfs.DFSUtil;
 /**
  * Cast input float to a decimal. Get target value scale from output column vector.
@@ -44,7 +43,7 @@ protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
     HiveDecimalWritable decWritable = outV.vector[i];
     // TEMPORARY: In order to avoid a new version of storage-api, do the conversion here...
-    byte[] floatBytes = DFSUtil.string2Bytes(Float.toString((float)inV.vector[i]));
+    byte[] floatBytes = Float.toString((float) inV.vector[i]).getBytes();
     decWritable.setFromBytes(floatBytes, 0, floatBytes.length);
     if (!decWritable.mutateEnforcePrecisionScale(outV.precision, outV.scale)) {
       outV.isNull[i] = true;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
index 2729372056..58e5d45da4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
@@ -21,7 +21,6 @@
 import org.apache.hadoop.hive.common.format.datetime.HiveSqlDateTimeFormatter;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hdfs.DFSUtil;
 import java.sql.Timestamp;
 import java.time.Instant;
@@ -77,7 +76,7 @@ void sqlFormat(BytesColumnVector outV, TimestampColumnVector inV, int i,
       outV.noNulls = false;
       return;
     }
-    byte[] temp = DFSUtil.string2Bytes(formattedString);
+    byte[] temp = formattedString.getBytes();
     assign(outV, i, temp, temp.length);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
index c751ab1410..321ccb73b2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
@@ -34,7 +34,6 @@
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hdfs.DFSUtil;
 /**
  * Test helper class that creates vectorized execution batches from arbitrary type iterables.
@@ -124,7 +123,7 @@ public void assign(
         Object value) {
       BytesColumnVector bcv = (BytesColumnVector) columnVector;
       String s = (String) value;
-      byte[] bytes = DFSUtil.string2Bytes(s);
+      byte[] bytes = s.getBytes();
       bcv.vector[row] = bytes;
       bcv.start[row] = 0;
       bcv.length[row] = bytes.length;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
index 8d06ac7eb3..f200aa26e6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
@@ -37,7 +37,6 @@
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hdfs.DFSUtil;
 public class VectorColumnGroupGenerator {
@@ -604,7 +603,7 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde
       BytesColumnVector bytesColVec = ((BytesColumnVector) colVector);
       for (int i = 0; i < size; i++) {
         if (!isNull[i]) {
-          byte[] bytes = DFSUtil.string2Bytes(stringArray[i]);
+          byte[] bytes = stringArray[i].getBytes();
           bytesColVec.setVal(i, bytes);
         }
       }
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 247dcfe009..cc598d2430 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -41,7 +41,6 @@
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -243,7 +242,7 @@ public void writeString(byte[] v, int start, int length) throws IOException {
   @Override
   public void writeHiveChar(HiveChar hiveChar) throws IOException {
     String string = hiveChar.getStrippedValue();
-    byte[] bytes = DFSUtil.string2Bytes(string);
+    byte[] bytes = string.getBytes();
     writeString(bytes);
   }
@@ -253,7 +252,7 @@ public void writeHiveChar(HiveChar hiveChar) throws IOException {
   @Override
   public void writeHiveVarchar(HiveVarchar hiveVarchar) throws IOException {
     String string = hiveVarchar.getValue();
-    byte[] bytes=DFSUtil.string2Bytes(string);
+    byte[] bytes = string.getBytes();
     writeString(bytes);
   }
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index b499c0a6e3..5085da0799 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -45,7 +45,6 @@
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
-import org.apache.hadoop.hdfs.DFSUtil;
 import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.LIST;
 import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.MAP;
 import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.STRUCT;
@@ -288,7 +287,7 @@ public void writeString(byte[] v, int start, int length) throws IOException {
   @Override
   public void writeHiveChar(HiveChar hiveChar) throws IOException {
     final String string = hiveChar.getStrippedValue();
-    final byte[] bytes=DFSUtil.string2Bytes(string);
+    final byte[] bytes = string.getBytes();
     writeString(bytes);
   }
@@ -298,7 +297,7 @@ public void writeHiveChar(HiveChar hiveChar) throws IOException {
   @Override
   public void writeHiveVarchar(HiveVarchar hiveVarchar) throws IOException {
     final String string = hiveVarchar.getValue();
-    final byte[] bytes=DFSUtil.string2Bytes(string);
+    final byte[] bytes = string.getBytes();
     writeString(bytes);
   }
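
Reviewer note (not part of the patch): DFSUtil.string2Bytes encodes with an explicit UTF-8 charset, while the bare String.getBytes() calls introduced above use the JVM's default charset. If a charset-stable drop-in were preferred, a minimal sketch could look like the following; the class and method names here are hypothetical and only illustrate the assumption, they are not Hive APIs.

import java.nio.charset.StandardCharsets;

public class Utf8BytesSketch {
  // Hypothetical helper: encodes like DFSUtil.string2Bytes(s), i.e. always UTF-8,
  // rather than the platform default charset picked by s.getBytes().
  public static byte[] toUtf8(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    byte[] rowdel = toUtf8("\n");   // same bytes on every platform
    byte[] nullstr = toUtf8("NULL");
    System.out.println(rowdel.length + " " + nullstr.length);
  }
}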