diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveConstants.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveConstants.java
index 6cdfe1b..8adb45e 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveConstants.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloHiveConstants.java
@@ -16,8 +16,6 @@
  */
 package org.apache.hadoop.hive.accumulo;
 
-import java.nio.charset.Charset;
-
 /**
  *
  */
@@ -38,5 +36,4 @@
   public static final String ESCAPED_ASERTISK_REGEX = Character.toString(ESCAPE)
       + Character.toString(ESCAPE) + Character.toString(ESCAPE) + Character.toString(ASTERISK);
 
-  public static final Charset UTF_8 = Charset.forName("UTF-8");
 }
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index e54e818..af63aac 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -38,6 +38,7 @@
 import java.net.JarURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -1062,7 +1063,7 @@ private String obtainPasswordFromFile(String passwordFilePath) {
     try {
       Path path = Paths.get(passwordFilePath);
       byte[] passwordFileContents = Files.readAllBytes(path);
-      return new String(passwordFileContents, "UTF-8").trim();
+      return new String(passwordFileContents, StandardCharsets.UTF_8).trim();
     } catch (Exception e) {
       throw new RuntimeException("Unable to read user password from the password file: " +
           passwordFilePath, e);
@@ -1561,7 +1562,8 @@ boolean isComment(String line) {
   String[] getCommands(File file) throws IOException {
     List cmds = new LinkedList();
     try (BufferedReader reader =
-        new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"))) {
+        new BufferedReader(new InputStreamReader(new FileInputStream(file),
+            StandardCharsets.UTF_8))) {
       StringBuilder cmd = null;
       while (true) {
         String scriptLine = reader.readLine();
diff --git a/beeline/src/java/org/apache/hive/beeline/OutputFile.java b/beeline/src/java/org/apache/hive/beeline/OutputFile.java
index 6946dbe..fa59186 100644
--- a/beeline/src/java/org/apache/hive/beeline/OutputFile.java
+++ b/beeline/src/java/org/apache/hive/beeline/OutputFile.java
@@ -27,6 +27,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 
 public class OutputFile {
   private final PrintStream out;
@@ -35,7 +36,7 @@
   public OutputFile(String filename) throws IOException {
     File file = new File(filename);
     this.filename = file.getAbsolutePath();
-    this.out = new PrintStream(file, "UTF-8");
+    this.out = new PrintStream(file, StandardCharsets.UTF_8.name());
   }
 
   @VisibleForTesting
diff --git a/beeline/src/test/org/apache/hive/beeline/TestBeeLineHistory.java b/beeline/src/test/org/apache/hive/beeline/TestBeeLineHistory.java
index c8f4d4e..1ba16d3 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestBeeLineHistory.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeeLineHistory.java
@@ -23,6 +23,7 @@
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
 
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -64,7 +65,7 @@ public void testNumHistories() throws Exception {
     method.invoke(beeline);
     beeline.initializeConsoleReader(null);
beeline.dispatch("!history"); - String output = os.toString("UTF-8"); + String output = os.toString(StandardCharsets.UTF_8.name()); int numHistories = output.split("\n").length; Assert.assertEquals(10, numHistories); beeline.close(); @@ -82,7 +83,7 @@ public void testHistory() throws Exception { method.invoke(beeline); beeline.initializeConsoleReader(null); beeline.dispatch("!history"); - String output = os.toString("UTF-8"); + String output = os.toString(StandardCharsets.UTF_8.name()); String[] tmp = output.split("\n"); Assert.assertTrue(tmp[0].equals("1 : select 1;")); Assert.assertTrue(tmp[9].equals("10 : select 10;")); diff --git a/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java b/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java index f266778..ff1faa2 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java @@ -28,6 +28,7 @@ import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CodingErrorAction; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -58,7 +59,7 @@ public RCFileCat() { super(); - decoder = Charset.forName("UTF-8").newDecoder(). + decoder = StandardCharsets.UTF_8.newDecoder(). onMalformedInput(CodingErrorAction.REPLACE). onUnmappableCharacter(CodingErrorAction.REPLACE); } diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java index 4a2bae5..b3c29c1 100644 --- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java +++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.io.PrintStream; import java.lang.reflect.Field; +import java.nio.charset.StandardCharsets; import java.security.Permission; import java.util.ArrayList; import java.util.Arrays; @@ -135,8 +136,8 @@ public void testThatCliDriverDoesNotStripComments() throws Exception { System.setOut(oldOut); System.setErr(oldErr); } - message = dataOut.toString("UTF-8"); - errors = dataErr.toString("UTF-8"); + message = dataOut.toString(StandardCharsets.UTF_8.name()); + errors = dataErr.toString(StandardCharsets.UTF_8.name()); assertTrue("Comments with '--; should not have been stripped," + " so command should fail", ret != 0); assertTrue("Comments with '--; should not have been stripped," diff --git a/common/src/java/org/apache/hive/http/StackServlet.java b/common/src/java/org/apache/hive/http/StackServlet.java index 0960cc9..c9970fb 100644 --- a/common/src/java/org/apache/hive/http/StackServlet.java +++ b/common/src/java/org/apache/hive/http/StackServlet.java @@ -22,6 +22,7 @@ import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.nio.charset.StandardCharsets; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; @@ -45,7 +46,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) } response.setContentType("text/plain; charset=UTF-8"); try (PrintStream out = new PrintStream( - response.getOutputStream(), false, "UTF-8")) { + response.getOutputStream(), false, StandardCharsets.UTF_8.name())) { printThreadInfo(out, ""); } } diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java index 780a708..77c8388 100644 --- 
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -28,6 +28,7 @@
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 
 import java.util.ArrayList;
 import java.util.concurrent.TimeUnit;
@@ -186,7 +187,8 @@ public void testEncodingDecoding() throws UnsupportedEncodingException {
     HiveConf conf = new HiveConf();
     String query = "select blah, '\u0001' from random_table";
     conf.setQueryString(query);
-    Assert.assertEquals(URLEncoder.encode(query, "UTF-8"), conf.get(ConfVars.HIVEQUERYSTRING.varname));
+    Assert.assertEquals(URLEncoder.encode(query, StandardCharsets.UTF_8.name()),
+        conf.get(ConfVars.HIVEQUERYSTRING.varname));
     Assert.assertEquals(query, conf.getQueryString());
   }
 }
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
index c5dc1e8..0b67fe0 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidKafkaUtils.java
@@ -50,6 +50,7 @@
 import java.io.IOException;
 import java.net.URL;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -153,7 +154,7 @@ static void updateKafkaIngestionSpec(String overlordAddress, KafkaSupervisorSpec
                 new URL(String.format("http://%s/druid/indexer/v1/supervisor",
                     overlordAddress))).setContent(
                 "application/json", JSON_MAPPER.writeValueAsBytes(spec)),
-            new FullResponseHandler(Charset.forName("UTF-8")));
+            new FullResponseHandler(StandardCharsets.UTF_8));
 
     if (response.getStatus().equals(HttpResponseStatus.OK)) {
       String msg =
@@ -181,7 +182,7 @@ static InputRowParser getInputRowParser(Table table, TimestampSpec timestampSpec
     // Default case JSON
     if ((parseSpecFormat == null) || "json".equalsIgnoreCase(parseSpecFormat)) {
-      return new StringInputRowParser(new JSONParseSpec(timestampSpec, dimensionsSpec, null, null), "UTF-8");
+      return new StringInputRowParser(new JSONParseSpec(timestampSpec, dimensionsSpec, null, null), StandardCharsets.UTF_8.name());
     } else if ("csv".equalsIgnoreCase(parseSpecFormat)) {
       return new StringInputRowParser(new CSVParseSpec(timestampSpec,
           dimensionsSpec,
@@ -189,7 +190,7 @@ static InputRowParser getInputRowParser(Table table, TimestampSpec timestampSpec
           DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
          DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
          DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)),
-          "UTF-8");
+          StandardCharsets.UTF_8.name());
     } else if ("delimited".equalsIgnoreCase(parseSpecFormat)) {
       return new StringInputRowParser(new DelimitedParseSpec(timestampSpec,
           dimensionsSpec,
@@ -198,7 +199,7 @@ static InputRowParser getInputRowParser(Table table, TimestampSpec timestampSpec
          DruidStorageHandlerUtils.getListProperty(table, DruidConstants.DRUID_PARSE_SPEC_COLUMNS),
          DruidStorageHandlerUtils.getBooleanProperty(table, DruidConstants.DRUID_PARSE_SPEC_HAS_HEADER_ROWS, false),
          DruidStorageHandlerUtils.getIntegerProperty(table, DruidConstants.DRUID_PARSE_SPEC_SKIP_HEADER_ROWS, 0)),
-          "UTF-8");
+          StandardCharsets.UTF_8.name());
     } else if ("avro".equalsIgnoreCase(parseSpecFormat)) {
       try {
         String avroSchemaLiteral = DruidStorageHandlerUtils.getTableProperty(table,
             DruidConstants.AVRO_SCHEMA_LITERAL);
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index 94a3a27..d4d536f 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -107,7 +107,7 @@
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -364,7 +364,7 @@ private void resetKafkaIngestion(String overlordAddress, String dataSourceName)
             new URL(String.format("http://%s/druid/indexer/v1/supervisor/%s/reset",
                 overlordAddress,
                 dataSourceName))),
-            new FullResponseHandler(Charset.forName("UTF-8"))),
+            new FullResponseHandler(StandardCharsets.UTF_8)),
         input -> input instanceof IOException, getMaxRetryCount());
 
     if (response.getStatus().equals(HttpResponseStatus.OK)) {
@@ -388,7 +388,7 @@ private void stopKafkaIngestion(String overlordAddress, String dataSourceName) {
             new URL(String.format("http://%s/druid/indexer/v1/supervisor/%s/shutdown",
                 overlordAddress,
                 dataSourceName))),
-            new FullResponseHandler(Charset.forName("UTF-8"))),
+            new FullResponseHandler(StandardCharsets.UTF_8)),
         input -> input instanceof IOException, getMaxRetryCount());
 
     if (response.getStatus().equals(HttpResponseStatus.OK)) {
@@ -420,7 +420,7 @@ private KafkaSupervisorSpec fetchKafkaIngestionSpec(Table table) {
         RetryUtils.retry(() -> DruidStorageHandlerUtils.getResponseFromCurrentLeader(getHttpClient(),
             new Request(HttpMethod.GET,
                 new URL(String.format("http://%s/druid/indexer/v1/supervisor/%s", overlordAddress, dataSourceName))),
-            new FullResponseHandler(Charset.forName("UTF-8"))),
+            new FullResponseHandler(StandardCharsets.UTF_8)),
         input -> input instanceof IOException, getMaxRetryCount());
 
     if (response.getStatus().equals(HttpResponseStatus.OK)) {
@@ -464,7 +464,7 @@ private KafkaSupervisorSpec fetchKafkaIngestionSpec(Table table) {
             new URL(String.format("http://%s/druid/indexer/v1/supervisor/%s/status",
                 overlordAddress,
                 dataSourceName))),
-            new FullResponseHandler(Charset.forName("UTF-8"))),
+            new FullResponseHandler(StandardCharsets.UTF_8)),
         input -> input instanceof IOException, getMaxRetryCount());
 
     if (response.getStatus().equals(HttpResponseStatus.OK)) {
@@ -538,7 +538,7 @@ private void checkLoadStatus(List segments) {
       coordinatorResponse = RetryUtils.retry(() -> DruidStorageHandlerUtils.getResponseFromCurrentLeader(getHttpClient(),
           new Request(HttpMethod.GET, new URL(String.format("http://%s/status", coordinatorAddress))),
-          new FullResponseHandler(Charset.forName("UTF-8"))).getContent(),
+          new FullResponseHandler(StandardCharsets.UTF_8)).getContent(),
           input -> input instanceof IOException,
           maxTries);
     } catch (Exception e) {
@@ -574,7 +574,7 @@ private void checkLoadStatus(List segments) {
         result = DruidStorageHandlerUtils.getResponseFromCurrentLeader(getHttpClient(),
             new Request(HttpMethod.GET, input),
-            new FullResponseHandler(Charset.forName("UTF-8"))).getContent();
+            new FullResponseHandler(StandardCharsets.UTF_8)).getContent();
         LOG.debug("Checking segment [{}] response is [{}]", input, result);
         return Strings.isNullOrEmpty(result);
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
index 6c1dbd3..15cc566 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
@@ -66,6 +66,7 @@
 import java.io.InputStream;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 
 import java.util.Arrays;
 import java.util.List;
@@ -251,7 +252,7 @@ public static DruidQueryRecordReader getDruidQueryReader(String druidQueryType)
         String.format("http://%s/druid/v2/datasources/%s/candidates?intervals=%s",
             address,
             query.getDataSource().getNames().get(0),
-            URLEncoder.encode(intervals, "UTF-8"));
+            URLEncoder.encode(intervals, StandardCharsets.UTF_8.name()));
     LOG.debug("sending request {} to query for segments", request);
     final InputStream response;
     try {
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
index 7a58964..80d448f 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java
@@ -25,6 +25,7 @@
 import java.io.OutputStream;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 
 import java.util.ArrayList;
 import java.util.Properties;
@@ -70,8 +71,10 @@ public static void main(String[] args) {
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
     ss.in = System.in;
     try {
-      ss.out = new SessionStream(System.out, true, "UTF-8");
-      ss.err = new SessionStream(System.err, true, "UTF-8");
+      ss.out =
+          new SessionStream(System.out, true, StandardCharsets.UTF_8.name());
+      ss.err =
+          new SessionStream(System.err, true, StandardCharsets.UTF_8.name());
     } catch (UnsupportedEncodingException e) {
       System.exit(1);
     }
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index 2641add..d929b87 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -22,6 +22,7 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Iterator;
@@ -48,8 +49,6 @@
  */
 public abstract class ReaderWriter {
 
-  private static final String UTF8 = "UTF-8";
-
   public static Object readDatum(DataInput in) throws IOException {
 
     byte type = in.readByte();
@@ -58,7 +57,7 @@ public static Object readDatum(DataInput in) throws IOException {
     case DataType.STRING:
       byte[] buffer = new byte[in.readInt()];
       in.readFully(buffer);
-      return new String(buffer, UTF8);
+      return new String(buffer, StandardCharsets.UTF_8);
 
     case DataType.INTEGER:
       VIntWritable vint = new VIntWritable();
@@ -191,7 +190,7 @@ public static void writeDatum(DataOutput out, Object val) throws IOException {
     case DataType.STRING:
       String s = (String) val;
-      byte[] utfBytes = s.getBytes(ReaderWriter.UTF8);
+      byte[] utfBytes = s.getBytes(StandardCharsets.UTF_8);
       out.writeInt(utfBytes.length);
       out.write(utfBytes);
       return;
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
index 174fbae..3bf3f15 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.Map; import java.util.Properties; @@ -180,7 +181,8 @@ static public void createInputFile(FileSystem fs, String fileName, throw new IOException("File " + fileName + " already exists on the minicluster"); } FSDataOutputStream stream = fs.create(path); - PrintWriter pw = new PrintWriter(new OutputStreamWriter(stream, "UTF-8")); + PrintWriter pw = + new PrintWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8)); for (int i = 0; i < inputData.length; i++) { pw.println(inputData[i]); } diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java index d476b43..dde7744 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java @@ -20,6 +20,7 @@ import java.io.UnsupportedEncodingException; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -99,7 +100,7 @@ rlist.add(new HiveVarchar("hive\nvarchar", 20)); rlist.add(Date.valueOf("2014-01-07")); rlist.add(Timestamp.ofEpochMilli(System.currentTimeMillis())); - rlist.add("hive\nbinary".getBytes("UTF-8")); + rlist.add("hive\nbinary".getBytes(StandardCharsets.UTF_8)); List nlist = new ArrayList(13); nlist.add(null); // tinyint diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java index b190e4b..ad105e4 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java @@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.rcfile; import java.io.IOException; -import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Properties; @@ -90,33 +90,30 @@ private static BytesRefArrayWritable s = null; static { - try { - bytesArray = new byte[][]{"123".getBytes("UTF-8"), - "456".getBytes("UTF-8"), "789".getBytes("UTF-8"), - "1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"), - "hive and hadoop".getBytes("UTF-8"), new byte[0], - "NULL".getBytes("UTF-8")}; - s = new BytesRefArrayWritable(bytesArray.length); - s.set(0, new BytesRefWritable("123".getBytes("UTF-8"))); - s.set(1, new BytesRefWritable("456".getBytes("UTF-8"))); - s.set(2, new BytesRefWritable("789".getBytes("UTF-8"))); - s.set(3, new BytesRefWritable("1000".getBytes("UTF-8"))); - s.set(4, new BytesRefWritable("5.3".getBytes("UTF-8"))); - s.set(5, new BytesRefWritable("hive and hadoop".getBytes("UTF-8"))); - s.set(6, new BytesRefWritable("NULL".getBytes("UTF-8"))); - s.set(7, new BytesRefWritable("NULL".getBytes("UTF-8"))); - - // partial test init - patialS.set(0, new BytesRefWritable("NULL".getBytes("UTF-8"))); - patialS.set(1, new BytesRefWritable("NULL".getBytes("UTF-8"))); - patialS.set(2, new BytesRefWritable("789".getBytes("UTF-8"))); - 
patialS.set(3, new BytesRefWritable("1000".getBytes("UTF-8"))); - patialS.set(4, new BytesRefWritable("NULL".getBytes("UTF-8"))); - patialS.set(5, new BytesRefWritable("NULL".getBytes("UTF-8"))); - patialS.set(6, new BytesRefWritable("NULL".getBytes("UTF-8"))); - patialS.set(7, new BytesRefWritable("NULL".getBytes("UTF-8"))); - } catch (UnsupportedEncodingException e) { - } + bytesArray = new byte[][]{"123".getBytes(StandardCharsets.UTF_8), + "456".getBytes(StandardCharsets.UTF_8), "789".getBytes(StandardCharsets.UTF_8), + "1000".getBytes(StandardCharsets.UTF_8), "5.3".getBytes(StandardCharsets.UTF_8), + "hive and hadoop".getBytes(StandardCharsets.UTF_8), new byte[0], + "NULL".getBytes(StandardCharsets.UTF_8)}; + s = new BytesRefArrayWritable(bytesArray.length); + s.set(0, new BytesRefWritable("123".getBytes(StandardCharsets.UTF_8))); + s.set(1, new BytesRefWritable("456".getBytes(StandardCharsets.UTF_8))); + s.set(2, new BytesRefWritable("789".getBytes(StandardCharsets.UTF_8))); + s.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8))); + s.set(4, new BytesRefWritable("5.3".getBytes(StandardCharsets.UTF_8))); + s.set(5, new BytesRefWritable("hive and hadoop".getBytes(StandardCharsets.UTF_8))); + s.set(6, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + s.set(7, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + + // partial test init + patialS.set(0, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + patialS.set(1, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + patialS.set(2, new BytesRefWritable("789".getBytes(StandardCharsets.UTF_8))); + patialS.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8))); + patialS.set(4, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + patialS.set(5, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + patialS.set(6, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); + patialS.set(7, new BytesRefWritable("NULL".getBytes(StandardCharsets.UTF_8))); } diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java index 43ac527..513f84c 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/AcidTableSerializer.java @@ -22,7 +22,7 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.conf.Configuration; @@ -77,7 +77,7 @@ public static String encode(AcidTable table) throws IOException { data.close(); } - return PROLOG_V1 + new String(Base64.encodeBase64(bytes.toByteArray()), Charset.forName("UTF-8")); + return PROLOG_V1 + new String(Base64.encodeBase64(bytes.toByteArray()), StandardCharsets.UTF_8); } /** Returns the {@link AcidTable} instance decoded from a base 64 representation. 
*/ diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java index d0aa86f..f1c27b9 100644 --- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java +++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java @@ -33,6 +33,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; @@ -49,7 +50,7 @@ public String serializeTable(HCatTable hcatTable) throws HCatException { try { return new TSerializer(new TJSONProtocol.Factory()) - .toString(hcatTable.toHiveTable(), "UTF-8"); + .toString(hcatTable.toHiveTable(), StandardCharsets.UTF_8.name()); } catch (TException exception) { throw new HCatException("Could not serialize HCatTable: " + hcatTable, exception); @@ -60,7 +61,7 @@ public String serializeTable(HCatTable hcatTable) throws HCatException { public HCatTable deserializeTable(String hcatTableStringRep) throws HCatException { try { Table table = new Table(); - new TDeserializer(new TJSONProtocol.Factory()).deserialize(table, hcatTableStringRep, "UTF-8"); + new TDeserializer(new TJSONProtocol.Factory()).deserialize(table, hcatTableStringRep, StandardCharsets.UTF_8.name()); return new HCatTable(table); } catch(TException exception) { @@ -74,7 +75,7 @@ public HCatTable deserializeTable(String hcatTableStringRep) throws HCatExceptio public String serializePartition(HCatPartition hcatPartition) throws HCatException { try { return new TSerializer(new TJSONProtocol.Factory()) - .toString(hcatPartition.toHivePartition(), "UTF-8"); + .toString(hcatPartition.toHivePartition(), StandardCharsets.UTF_8.name()); } catch (TException exception) { throw new HCatException("Could not serialize HCatPartition: " + hcatPartition, exception); @@ -85,7 +86,7 @@ public String serializePartition(HCatPartition hcatPartition) throws HCatExcepti public HCatPartition deserializePartition(String hcatPartitionStringRep) throws HCatException { try { Partition partition = new Partition(); - new TDeserializer(new TJSONProtocol.Factory()).deserialize(partition, hcatPartitionStringRep, "UTF-8"); + new TDeserializer(new TJSONProtocol.Factory()).deserialize(partition, hcatPartitionStringRep, StandardCharsets.UTF_8.name()); return new HCatPartition(null, partition); } catch(TException exception) { @@ -103,7 +104,7 @@ public HCatPartition deserializePartition(String hcatPartitionStringRep) throws List stringReps = new ArrayList(); TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); for (PartitionSpec partitionSpec : hcatPartitionSpec.partitionSpecProxy.toPartitionSpec()) { - stringReps.add(serializer.toString(partitionSpec, "UTF-8")); + stringReps.add(serializer.toString(partitionSpec, StandardCharsets.UTF_8.name())); } return stringReps; } @@ -119,7 +120,7 @@ public HCatPartitionSpec deserializePartitionSpec(List hcatPartitionSpec TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory()); for (String stringRep : hcatPartitionSpecStrings) { PartitionSpec partSpec = new PartitionSpec(); - deserializer.deserialize(partSpec, stringRep, "UTF-8"); + deserializer.deserialize(partSpec, stringRep, StandardCharsets.UTF_8.name()); partitionSpecList.add(partSpec); } return new HCatPartitionSpec(null, PartitionSpecProxy.Factory.get(partitionSpecList)); diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java index df8c32e..1b3581e 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java @@ -27,6 +27,7 @@ import java.net.URL; import java.net.URLConnection; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collection; @@ -425,7 +426,7 @@ public static String findContainingJar(Class clazz, String fileNamePattern) { if ("jar".equals(url.getProtocol())) { String toReturn = url.getPath(); if (fileNamePattern == null || toReturn.matches(fileNamePattern)) { - toReturn = URLDecoder.decode(toReturn, "UTF-8"); + toReturn = URLDecoder.decode(toReturn, StandardCharsets.UTF_8.name()); return toReturn.replaceAll("!.*$", ""); } } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java index 1fc8d36..ed66394 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java @@ -19,6 +19,7 @@ package org.apache.hive.hcatalog.templeton.tool; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; @@ -53,8 +54,6 @@ public static final String ZK_SESSION_TIMEOUT = "templeton.zookeeper.session-timeout"; - public static final String ENCODING = "UTF-8"; - private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperStorage.class); private CuratorFramework zk; @@ -200,9 +199,9 @@ public String getPath(Type type) { private void setFieldData(Type type, String id, String name, String val) throws Exception { try { zk.create().withMode(CreateMode.PERSISTENT).withACL(Ids.OPEN_ACL_UNSAFE) - .forPath(makeFieldZnode(type, id, name), val.getBytes(ENCODING)); + .forPath(makeFieldZnode(type, id, name), val.getBytes(StandardCharsets.UTF_8)); } catch (KeeperException.NodeExistsException e) { - zk.setData().forPath(makeFieldZnode(type, id, name), val.getBytes(ENCODING)); + zk.setData().forPath(makeFieldZnode(type, id, name), val.getBytes(StandardCharsets.UTF_8)); } } @@ -238,7 +237,7 @@ public void saveField(Type type, String id, String key, String val) public String getField(Type type, String id, String key) { try { byte[] b = zk.getData().forPath(makeFieldZnode(type, id, key)); - return new String(b, ENCODING); + return new String(b, StandardCharsets.UTF_8); } catch (Exception e) { return null; } diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java index 78598a7..7f8b519 100644 --- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java +++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java @@ -38,6 +38,7 @@ import org.eclipse.jetty.http.HttpStatus; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ 
-67,7 +68,6 @@ private static final String username= "johndoe"; private static final String ERROR_CODE = "errorCode"; private static Main templetonServer; - private static final String charSet = "UTF-8"; @BeforeClass public static void startHebHcatInMem() throws Exception { @@ -340,7 +340,7 @@ private static MethodCallRetVal doHttpCall(String uri, HTTP_METHOD_TYPE type, Ma } String msgBody = JsonBuilder.mapToJson(data); LOG.info("Msg Body: " + msgBody); - StringRequestEntity sre = new StringRequestEntity(msgBody, "application/json", charSet); + StringRequestEntity sre = new StringRequestEntity(msgBody, "application/json", StandardCharsets.UTF_8.name()); ((PutMethod)method).setRequestEntity(sre); break; default: diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java index 9e27ba1..1467ee3 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java @@ -19,6 +19,7 @@ package org.apache.hive.hplsql; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.InputStream; @@ -814,7 +815,7 @@ Integer init(String[] args) throws Exception { } InputStream input = null; if (execString != null) { - input = new ByteArrayInputStream(execString.getBytes("UTF-8")); + input = new ByteArrayInputStream(execString.getBytes(StandardCharsets.UTF_8)); } else { input = new FileInputStream(execFile); @@ -883,7 +884,7 @@ void includeRcFile() { */ boolean includeFile(String file, boolean showError) { try { - String content = FileUtils.readFileToString(new java.io.File(file), "UTF-8"); + String content = FileUtils.readFileToString(new java.io.File(file), StandardCharsets.UTF_8); if (content != null && !content.isEmpty()) { if (trace) { trace(null, "INCLUDE CONTENT " + file + " (non-empty)"); @@ -904,7 +905,7 @@ boolean includeFile(String file, boolean showError) { * Execute statements from an include file */ void include(String content) throws Exception { - InputStream input = new ByteArrayInputStream(content.getBytes("UTF-8")); + InputStream input = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(input)); CommonTokenStream tokens = new CommonTokenStream(lexer); HplsqlParser parser = new HplsqlParser(tokens); @@ -1491,7 +1492,7 @@ String createLocalUdf() { } try { String file = System.getProperty("user.dir") + "/" + Conf.HPLSQL_LOCALS_SQL; - PrintWriter writer = new PrintWriter(file, "UTF-8"); + PrintWriter writer = new PrintWriter(file, StandardCharsets.UTF_8.name()); writer.print(localUdf); writer.close(); return file; diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java index cd0e938..f1ef1a2 100644 --- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java +++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java @@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.io.StringReader; +import java.nio.charset.StandardCharsets; + import org.apache.commons.io.FileUtils; import org.junit.Assert; import org.junit.Test; @@ -407,7 +409,7 @@ void run(String testFile) throws Exception { exec.run(args); String s = getTestOutput(out.toString()).trim(); FileUtils.writeStringToFile(new java.io.File("target/tmp/log/" + testFile + ".out.txt"), s); - String t = 
FileUtils.readFileToString(new java.io.File("src/test/results/local/" + testFile + ".out.txt"), "utf-8").trim(); + String t = FileUtils.readFileToString(new java.io.File("src/test/results/local/" + testFile + ".out.txt"), StandardCharsets.UTF_8).trim(); System.setOut(null); Assert.assertEquals(s, t); } diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java index b48c8c5..1c0ca37 100644 --- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java +++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java @@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.io.StringReader; +import java.nio.charset.StandardCharsets; + import org.apache.commons.io.FileUtils; import org.junit.Assert; import org.junit.Test; @@ -118,7 +120,7 @@ void run(String testFile) throws Exception { exec.run(args); String s = getTestOutput(out.toString()).trim(); FileUtils.writeStringToFile(new java.io.File("target/tmp/log/" + testFile + ".out.txt"), s); - String t = FileUtils.readFileToString(new java.io.File("src/test/results/offline/" + testFile + ".out.txt"), "utf-8").trim(); + String t = FileUtils.readFileToString(new java.io.File("src/test/results/offline/" + testFile + ".out.txt"), StandardCharsets.UTF_8).trim(); System.setOut(null); Assert.assertEquals(s, t); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index 5fd0ef9..be75c8e 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -21,6 +21,7 @@ import java.io.UnsupportedEncodingException; import java.lang.reflect.Proxy; +import java.nio.charset.StandardCharsets; import java.util.LinkedList; import java.util.Map; @@ -135,8 +136,10 @@ public void testSimpleQuery() { CliSessionState ss = new CliSessionState(hconf); ss.in = System.in; try { - ss.out = new SessionStream(System.out, true, "UTF-8"); - ss.err = new SessionStream(System.err, true, "UTF-8"); + ss.out = + new SessionStream(System.out, true, StandardCharsets.UTF_8.name()); + ss.err = + new SessionStream(System.err, true, StandardCharsets.UTF_8.name()); } catch (UnsupportedEncodingException e) { System.exit(3); } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java index b6d01ce..8639d4b 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeelinePasswordOption.java @@ -25,6 +25,7 @@ import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; @@ -267,7 +268,7 @@ private String connectWithPromptAndVerify(List argList, String prompt, "Expected " + expectedHiveConfValue + " got " + hiveConfValue + " for " + hiveConfKey, expectedHiveConfValue.equalsIgnoreCase(hiveConfValue)); } - String output = os.toString("UTF-8"); + String output = os.toString(StandardCharsets.UTF_8.name()); LOG.debug(output); return output; } finally { diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java b/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java index 06ada23..4c80fb8 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/beeline/hs2connection/BeelineWithHS2ConnectionFileTestBase.java @@ -26,6 +26,7 @@ import java.io.PrintStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.sql.DriverManager; import java.util.ArrayList; import java.util.HashMap; @@ -118,7 +119,8 @@ protected Hs2ConnectionXmlConfigFileWriter() throws IOException { file.delete(); } file.createNewFile(); - writer = new PrintWriter(file.getAbsolutePath(), "UTF-8"); + writer = new PrintWriter(file.getAbsolutePath(), + StandardCharsets.UTF_8.name()); } finally { file.deleteOnExit(); } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java index d2e9514..2386c27 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java @@ -30,6 +30,7 @@ import java.lang.reflect.Field; import java.math.BigDecimal; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; @@ -436,11 +437,11 @@ public void testDataTypes() throws Exception { assertEquals(Timestamp.valueOf("2012-04-22 09:00:00.123456789"), rowValues[16]); assertEquals(new BigDecimal("123456789.123456"), rowValues[17]); - assertArrayEquals("abcd".getBytes("UTF-8"), (byte[]) rowValues[18]); + assertArrayEquals("abcd".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[18]); assertEquals(Date.valueOf("2013-01-01"), rowValues[19]); assertEquals("abc123", rowValues[20]); assertEquals("abc123 ", rowValues[21]); - assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]); + assertArrayEquals("X'01FF'".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[22]); } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 45aac5f..63a0455 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -51,6 +51,7 @@ import java.lang.Exception; import java.lang.Object; import java.lang.String; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; @@ -908,7 +909,7 @@ public void testDataTypes() throws Exception { assertEquals("abc123", res.getString(21)); assertEquals("abc123 ", res.getString(22)); - byte[] bytes = "X'01FF'".getBytes("UTF-8"); + byte[] bytes = "X'01FF'".getBytes(StandardCharsets.UTF_8); InputStream resultSetInputStream = res.getBinaryStream(23); int len = bytes.length; byte[] b = new byte[len]; diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java index b69a2f9..c69e319 100644 --- 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java @@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertArrayEquals; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Timestamp; import java.util.List; @@ -280,11 +282,11 @@ public void testDataTypes() throws Exception { assertEquals(Timestamp.valueOf("2012-04-22 09:00:00.123456"), rowValues[16]); assertEquals(new BigDecimal("123456789.123456"), rowValues[17]); - assertArrayEquals("abcd".getBytes("UTF-8"), (byte[]) rowValues[18]); + assertArrayEquals("abcd".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[18]); assertEquals(Date.valueOf("2013-01-01"), rowValues[19]); assertEquals("abc123", rowValues[20]); assertEquals("abc123 ", rowValues[21]); - assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]); + assertArrayEquals("X'01FF'".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[22]); } /** diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapVectorArrow.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapVectorArrow.java index 55a2df8..e22ee47 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapVectorArrow.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapVectorArrow.java @@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertArrayEquals; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; + import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Timestamp; import java.util.List; @@ -224,11 +226,11 @@ public void testDataTypes() throws Exception { assertEquals(Timestamp.valueOf("2012-04-22 09:00:00.123456"), rowValues[16]); assertEquals(new BigDecimal("123456789.123456"), rowValues[17]); - assertArrayEquals("abcd".getBytes("UTF-8"), (byte[]) rowValues[18]); + assertArrayEquals("abcd".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[18]); assertEquals(Date.valueOf("2013-01-01"), rowValues[19]); assertEquals("abc123", rowValues[20]); assertEquals("abc123 ", rowValues[21]); - assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]); + assertArrayEquals("X'01FF'".getBytes(StandardCharsets.UTF_8), (byte[]) rowValues[22]); } } diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java index bd5e811..4df3583 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestServiceDiscovery.java @@ -34,7 +34,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -164,7 +164,7 @@ private void publishConfsToZk(Map confs, String uri) throws Exce String znodeData = ""; // Publish configs for this instance as the data on the node znodeData = Joiner.on(';').withKeyValueSeparator("=").join(confs); - byte[] znodeDataUTF8 = znodeData.getBytes(Charset.forName("UTF-8")); + byte[] znodeDataUTF8 = znodeData.getBytes(StandardCharsets.UTF_8); PersistentEphemeralNode 
znode = new PersistentEphemeralNode(client, PersistentEphemeralNode.Mode.EPHEMERAL_SEQUENTIAL, pathPrefix, znodeDataUTF8); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java index 386560f..77d1317 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java @@ -28,6 +28,7 @@ import static org.junit.Assert.fail; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; @@ -865,7 +866,7 @@ public void testDataTypes() throws Exception { assertEquals("abc123", res.getString(21)); assertEquals("abc123 ", res.getString(22)); - byte[] bytes = "X'01FF'".getBytes("UTF-8"); + byte[] bytes = "X'01FF'".getBytes(StandardCharsets.UTF_8); InputStream resultSetInputStream = res.getBinaryStream(23); int len = bytes.length; byte[] b = new byte[len]; diff --git a/itests/qtest-druid/src/main/java/org/apache/hive/kafka/SingleNodeKafkaCluster.java b/itests/qtest-druid/src/main/java/org/apache/hive/kafka/SingleNodeKafkaCluster.java index 348bc78..22e62f9 100644 --- a/itests/qtest-druid/src/main/java/org/apache/hive/kafka/SingleNodeKafkaCluster.java +++ b/itests/qtest-druid/src/main/java/org/apache/hive/kafka/SingleNodeKafkaCluster.java @@ -26,7 +26,7 @@ import java.io.File; import java.io.IOException; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Properties; import java.util.concurrent.ExecutionException; @@ -123,7 +123,7 @@ public void createTopicWithData(String topicName, File datafile){ new StringSerializer(), new StringSerializer() )){ - List events = Files.readLines(datafile, Charset.forName("UTF-8")); + List events = Files.readLines(datafile, StandardCharsets.UTF_8); for(String event : events){ producer.send(new ProducerRecord<>(topicName, "key", event)); } diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java index 254cc95..6d3a0a9 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QOutProcessor.java @@ -24,6 +24,7 @@ import java.io.FileOutputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -161,8 +162,10 @@ public void maskPatterns(String fname, String tname) throws Exception { File fileOrig = new File(fname + ".orig"); FileUtils.copyFile(file, fileOrig); - in = new BufferedReader(new InputStreamReader(new FileInputStream(fileOrig), "UTF-8")); - out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8")); + in = new BufferedReader(new InputStreamReader(new FileInputStream(fileOrig), + StandardCharsets.UTF_8)); + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), + StandardCharsets.UTF_8)); boolean lastWasMasked = false; diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 9fe9d05..b9a87a8 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -140,7 +140,6 @@ public class QTestUtil { private static final Logger LOG = LoggerFactory.getLogger("QTestUtil"); - private static final String UTF_8 = "UTF-8"; // security property names private static final String SECURITY_KEY_PROVIDER_URI_NAME = "dfs.encryption.key.provider.uri"; @@ -745,7 +744,9 @@ public void shutdown() throws Exception { public String readEntireFileIntoString(File queryFile) throws IOException { InputStreamReader isr = - new InputStreamReader(new BufferedInputStream(new FileInputStream(queryFile)), QTestUtil.UTF_8); + new InputStreamReader( + new BufferedInputStream(new FileInputStream(queryFile)), + StandardCharsets.UTF_8); StringWriter sw = new StringWriter(); try { IOUtils.copy(isr, sw); @@ -1164,15 +1165,15 @@ private void setSessionOutputs(String fileName, CliSessionState ss, File outf) t ss.out.flush(); } if (qSortQuerySet.contains(fileName)) { - ss.out = new SortPrintStream(fo, "UTF-8"); + ss.out = new SortPrintStream(fo, StandardCharsets.UTF_8.name()); } else if (qHashQuerySet.contains(fileName)) { - ss.out = new DigestPrintStream(fo, "UTF-8"); + ss.out = new DigestPrintStream(fo, StandardCharsets.UTF_8.name()); } else if (qSortNHashQuerySet.contains(fileName)) { - ss.out = new SortAndDigestPrintStream(fo, "UTF-8"); + ss.out = new SortAndDigestPrintStream(fo, StandardCharsets.UTF_8.name()); } else { - ss.out = new SessionStream(fo, true, "UTF-8"); + ss.out = new SessionStream(fo, true, StandardCharsets.UTF_8.name()); } - ss.err = new CachingPrintStream(fo, true, "UTF-8"); + ss.err = new CachingPrintStream(fo, true, StandardCharsets.UTF_8.name()); ss.setIsSilent(true); } @@ -1622,17 +1623,19 @@ private static QTestProcessExecResult executeCmd(String[] args, String outFile, out = outFile == null ? SessionState.getConsole().getChildOutStream() : - new PrintStream(new FileOutputStream(outFile), true, "UTF-8"); + new PrintStream(new FileOutputStream(outFile), true, + StandardCharsets.UTF_8.name()); PrintStream err = errFile == null ? 
SessionState.getConsole().getChildErrStream() : - new PrintStream(new FileOutputStream(errFile), true, "UTF-8"); + new PrintStream(new FileOutputStream(errFile), true, + StandardCharsets.UTF_8.name()); Process executor = Runtime.getRuntime().exec(args); ByteArrayOutputStream bos = new ByteArrayOutputStream(); - PrintStream str = new PrintStream(bos, true, "UTF-8"); + PrintStream str = new PrintStream(bos, true, StandardCharsets.UTF_8.name()); StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, err); StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, out, str); diff --git a/itests/util/src/main/java/org/apache/hive/beeline/ConvertedOutputFile.java b/itests/util/src/main/java/org/apache/hive/beeline/ConvertedOutputFile.java index fbff900..13662e0 100644 --- a/itests/util/src/main/java/org/apache/hive/beeline/ConvertedOutputFile.java +++ b/itests/util/src/main/java/org/apache/hive/beeline/ConvertedOutputFile.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.io.SortPrintStream; import java.io.PrintStream; +import java.nio.charset.StandardCharsets; /** * Class for representing an OutputFile, into which the writes are converted by the existing @@ -70,17 +71,17 @@ void fetchFinished() { public enum Converter { SORT_QUERY_RESULTS { public PrintStream getConvertedPrintStream(PrintStream inner) throws Exception { - return new SortPrintStream(inner, "UTF-8"); + return new SortPrintStream(inner, StandardCharsets.UTF_8.name()); } }, HASH_QUERY_RESULTS { public PrintStream getConvertedPrintStream(PrintStream inner) throws Exception { - return new DigestPrintStream(inner, "UTF-8"); + return new DigestPrintStream(inner, StandardCharsets.UTF_8.name()); } }, SORT_AND_HASH_QUERY_RESULTS { public PrintStream getConvertedPrintStream(PrintStream inner) throws Exception { - return new SortAndDigestPrintStream(inner, "UTF-8"); + return new SortAndDigestPrintStream(inner, StandardCharsets.UTF_8.name()); } }, NONE { diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java index 6e64c53..a7f1f7a 100644 --- a/itests/util/src/main/java/org/apache/hive/beeline/QFile.java +++ b/itests/util/src/main/java/org/apache/hive/beeline/QFile.java @@ -215,7 +215,8 @@ private String sortInputOutput(String source) { * @throws IOException */ public void filterOutput() throws IOException { - String output = FileUtils.readFileToString(rawOutputFile, "UTF-8"); + String output = + FileUtils.readFileToString(rawOutputFile, StandardCharsets.UTF_8); if (comparePortable) { output = portableFilterSet.filter(output); } @@ -277,7 +278,7 @@ private QTestProcessExecResult executeDiff() throws IOException, InterruptedExce new String[diffCommandArgs.size()])); ByteArrayOutputStream bos = new ByteArrayOutputStream(); - PrintStream out = new PrintStream(bos, true, "UTF-8"); + PrintStream out = new PrintStream(bos, true, StandardCharsets.UTF_8.name()); StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, System.err); StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out, out); @@ -436,7 +437,8 @@ public QFile getQFile(String name) throws IOException { result.afterExecuteLogFile = new File(logDirectory, name + ".q.afterExecute.log"); result.useSharedDatabase = useSharedDatabase; result.converter = Converter.NONE; - String input = FileUtils.readFileToString(result.inputFile, "UTF-8"); + String input = + FileUtils.readFileToString(result.inputFile, 
StandardCharsets.UTF_8); if (input.contains("-- SORT_QUERY_RESULTS")) { result.converter = Converter.SORT_QUERY_RESULTS; } @@ -465,7 +467,8 @@ private File prepareExpectedOutputFile (String name, boolean comparePortable) th return new File(resultsDirectory, name + ".q.out"); } else { File rawExpectedOutputFile = new File(resultsDirectory, name + ".q.out"); - String rawOutput = FileUtils.readFileToString(rawExpectedOutputFile, "UTF-8"); + String rawOutput = FileUtils.readFileToString(rawExpectedOutputFile, + StandardCharsets.UTF_8); rawOutput = portableFilterSet.filter(rawOutput); File expectedOutputFile = new File(logDirectory, name + ".q.out.portable"); FileUtils.writeStringToFile(expectedOutputFile, rawOutput); diff --git a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java index be4c6e8..213d902 100644 --- a/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java +++ b/itests/util/src/main/java/org/apache/hive/beeline/QFileBeeLineClient.java @@ -25,6 +25,7 @@ import java.io.File; import java.io.IOException; import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; @@ -69,7 +70,7 @@ protected QFileBeeLineClient(String jdbcUrl, String jdbcDriver, String username, File log) throws IOException { logFile = log; beeLine = new BeeLine(); - beelineOutputStream = new PrintStream(logFile, "UTF-8"); + beelineOutputStream = new PrintStream(logFile, StandardCharsets.UTF_8.name()); beeLine.setOutputStream(beelineOutputStream); beeLine.setErrorStream(beelineOutputStream); beeLine.runCommands( diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/QueryConditionBuilder.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/QueryConditionBuilder.java index 194fad8..b791ffe 100644 --- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/QueryConditionBuilder.java +++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/QueryConditionBuilder.java @@ -26,6 +26,7 @@ import java.beans.XMLDecoder; import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -121,7 +122,8 @@ private String createConditionString(String filterXml, Map colum return EMPTY_STRING; } - try (XMLDecoder decoder = new XMLDecoder(new ByteArrayInputStream(filterXml.getBytes("UTF-8")))) { + try (XMLDecoder decoder = new XMLDecoder( + new ByteArrayInputStream(filterXml.getBytes(StandardCharsets.UTF_8)))) { Object object = decoder.readObject(); if (!(object instanceof ExprNodeDesc)) { LOGGER.error("Deserialized filter expression is not of the expected type"); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 8d5aa70..23fbe09 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -82,6 +82,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; +import java.nio.charset.StandardCharsets; import java.security.AccessControlContext; import java.security.AccessController; import java.security.KeyStore; @@ -373,7 +374,8 @@ private void executeInitSql() throws SQLException { List initSqlList = null; try { FileInputStream input = new FileInputStream(file); - br = new BufferedReader(new InputStreamReader(input, 
"UTF-8")); + br = new BufferedReader( + new InputStreamReader(input, StandardCharsets.UTF_8)); String line; StringBuilder sb = new StringBuilder(""); while ((line = br.readLine()) != null) { diff --git a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java index f86b112..7975670 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java @@ -22,6 +22,7 @@ import java.io.Reader; import java.math.BigDecimal; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.sql.Array; import java.sql.Blob; import java.sql.Clob; @@ -274,7 +275,8 @@ public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException */ public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException { - String str = new Scanner(x, "UTF-8").useDelimiter("\\A").next(); + String str = new Scanner(x, StandardCharsets.UTF_8.name()) + .useDelimiter("\\A").next(); setString(parameterIndex, str); } diff --git a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java index 0468f7b..a597858 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java +++ b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java @@ -18,7 +18,7 @@ package org.apache.hive.jdbc; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -115,7 +115,7 @@ private static void updateParamsWithZKServerNode(JdbcConnectionParams connParams String dataStr = new String( zooKeeperClient.getData().forPath("/" + zooKeeperNamespace + "/" + serverNode), - Charset.forName("UTF-8")); + StandardCharsets.UTF_8); // If dataStr is not null and dataStr is not a KV pattern, // it must be the server uri added by an older version HS2 Matcher matcher = kvPattern.matcher(dataStr); diff --git a/llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java b/llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java index 83e5246..aca76f0 100644 --- a/llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java +++ b/llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java @@ -15,6 +15,7 @@ package org.apache.hadoop.hive.llap.tez; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; @@ -187,7 +188,7 @@ private static EntityDescriptorProto convertToProto( if (descriptor.getHistoryText() != null) { try { builder.setHistoryText(TezCommonUtils.compressByteArrayToByteString( - descriptor.getHistoryText().getBytes("UTF-8"))); + descriptor.getHistoryText().getBytes(StandardCharsets.UTF_8))); } catch (IOException e) { throw new TezUncheckedException(e); } diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java index 698a56e..18c413f 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/services/impl/TestLlapWebServices.java @@ -26,6 +26,7 @@ import java.io.StringWriter; import java.net.HttpURLConnection; import java.net.URL; +import java.nio.charset.StandardCharsets; public class TestLlapWebServices { @@ -58,7 +59,7 @@ 
private String getURLResponseAsString(String baseURL) throws IOException { HttpURLConnection conn = (HttpURLConnection) url.openConnection(); Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); StringWriter writer = new StringWriter(); - IOUtils.copy(conn.getInputStream(), writer, "UTF-8"); + IOUtils.copy(conn.getInputStream(), writer, StandardCharsets.UTF_8); return writer.toString(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 01ecf0a..576d6a6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -24,6 +24,7 @@ import java.io.PrintStream; import java.io.Serializable; import java.net.InetAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -2756,7 +2757,8 @@ public boolean getResults(List res) throws IOException { try { ss = Utilities.readColumn(resStream, bos); if (bos.getLength() > 0) { - row = new String(bos.getData(), 0, bos.getLength(), "UTF-8"); + row = new String(bos.getData(), 0, bos.getLength(), + StandardCharsets.UTF_8); } else if (ss == Utilities.StreamStatus.TERMINATED) { row = new String(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java index ac03efe..247c1fe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.io.Serializable; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; @@ -660,7 +661,7 @@ public String toThriftJSONString() throws IOException { e.printStackTrace(); return q.toString(); } - return tmb.toString("UTF-8"); + return tmb.toString(StandardCharsets.UTF_8.name()); } public String toBinaryString() throws IOException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index cb7fdf7..8b2bc45 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -2246,7 +2246,7 @@ private int showCreateDatabase(Hive db, DataOutputStream outStream, String datab createDb_str.append(propertiesToString).append(")\n"); } - outStream.write(createDb_str.toString().getBytes("UTF-8")); + outStream.write(createDb_str.toString().getBytes(StandardCharsets.UTF_8)); return 0; } @@ -3367,7 +3367,8 @@ private void writeToFile(String data, String file) throws IOException { FSDataOutputStream out = fs.create(resFile); try { if (data != null && !data.isEmpty()) { - OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8"); + OutputStreamWriter writer = + new OutputStreamWriter(out, StandardCharsets.UTF_8); writer.write(data); writer.write((char) terminator); writer.flush(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 2ff9ad3..01d7d6a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -42,6 +42,7 @@ import java.net.URL; import java.net.URLClassLoader; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; @@ -3893,9 +3894,9 @@ 
public static String jarFinderGetJar(Class klass) { if (path.startsWith("file:")) { path = path.substring("file:".length()); } - path = URLDecoder.decode(path, "UTF-8"); + path = URLDecoder.decode(path, StandardCharsets.UTF_8.name()); if ("jar".equals(url.getProtocol())) { - path = URLDecoder.decode(path, "UTF-8"); + path = URLDecoder.decode(path, StandardCharsets.UTF_8.name()); return path.replaceAll("!.*$", ""); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java index 7dc322e..b6de026 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -62,7 +63,8 @@ protected void func(DecimalColumnVector outputColVector, BytesColumnVector input * e.g. by converting to decimal from the input bytes directly without * making a new string. */ - s = new String(inputColVector.vector[i], inputColVector.start[i], inputColVector.length[i], "UTF-8"); + s = new String(inputColVector.vector[i], inputColVector.start[i], + inputColVector.length[i], StandardCharsets.UTF_8); outputColVector.set(i, HiveDecimal.create(s)); } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java index 790328d..324d295 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -148,7 +149,8 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException { private void evaluate(IntervalDayTimeColumnVector outputColVector, BytesColumnVector inputColVector, int i) { try { HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf( - new String(inputColVector.vector[i], inputColVector.start[i], inputColVector.length[i], "UTF-8")); + new String(inputColVector.vector[i], inputColVector.start[i], + inputColVector.length[i], StandardCharsets.UTF_8)); outputColVector.set(i, interval); } catch (Exception e) { outputColVector.setNullValue(i); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java index 4fd0859..28dbf11 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -154,7 +155,8 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException { private void evaluate(LongColumnVector outputColVector, 
BytesColumnVector inputColVector, int i) { try { HiveIntervalYearMonth interval = HiveIntervalYearMonth.valueOf( - new String(inputColVector.vector[i], inputColVector.start[i], inputColVector.length[i], "UTF-8")); + new String(inputColVector.vector[i], inputColVector.start[i], + inputColVector.length[i], StandardCharsets.UTF_8)); outputColVector.vector[i] = interval.getTotalMonths(); } catch (Exception e) { outputColVector.vector[i] = 1; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToTimestamp.java index b48b013..c526753 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToTimestamp.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; import java.util.Arrays; +import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -149,7 +150,7 @@ private void evaluate(TimestampColumnVector outputColVector, BytesColumnVector i PrimitiveObjectInspectorUtils.getTimestampFromString( new String( inputColVector.vector[i], inputColVector.start[i], inputColVector.length[i], - "UTF-8")); + StandardCharsets.UTF_8)); outputColVector.set(i, timestamp.toSqlTimestamp()); } catch (Exception e) { outputColVector.setNullValue(i); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java index 94d37f7..ea4ffc2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterStringColRegExpStringScalar.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.Text; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; @@ -44,11 +45,7 @@ public FilterStringColRegExpStringScalar() { public FilterStringColRegExpStringScalar(int colNum, byte [] regExpPattern) throws HiveException { super(colNum, null); - try { - super.setPattern(new String(regExpPattern, "UTF-8")); - } catch (Exception ex) { - throw new HiveException(ex); - } + super.setPattern(new String(regExpPattern, StandardCharsets.UTF_8)); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java index caedc80..8bf94fb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.io.Text; +import java.nio.charset.StandardCharsets; import java.sql.Date; import java.sql.Timestamp; import java.text.ParseException; @@ -111,7 +112,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException { case CHAR: case VARCHAR: try { - date.setTime(formatter.parse(new String(bytesValue, "UTF-8")).getTime()); + date.setTime(formatter.parse(new String(bytesValue, StandardCharsets.UTF_8)).getTime()); baseDate = 
DateWritableV2.dateToDays(date); break; } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java index 28addf7..22a1583 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.io.Text; +import java.nio.charset.StandardCharsets; import java.sql.Date; import java.sql.Timestamp; import java.text.ParseException; @@ -112,7 +113,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException { case CHAR: case VARCHAR: try { - date.setTime(formatter.parse(new String(stringValue, "UTF-8")).getTime()); + date.setTime(formatter.parse(new String(stringValue, StandardCharsets.UTF_8)).getTime()); baseDate = DateWritableV2.dateToDays(date); break; } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index 5dbf634..c119ab9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -83,7 +83,7 @@ import com.google.common.annotations.VisibleForTesting; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; /** * Utilities that are shared by all of the ACID input and output formats. They @@ -2050,7 +2050,6 @@ public static boolean isRawFormatFile(Path dataFile, FileSystem fs) throws IOExc public static final class OrcAcidVersion { private static final String ACID_VERSION_KEY = "hive.acid.version"; private static final String ACID_FORMAT = "_orc_acid_version"; - private static final Charset UTF8 = Charset.forName("UTF-8"); public static final int ORC_ACID_VERSION_DEFAULT = 0; /** * 2 is the version of Acid released in Hive 3.0. 
@@ -2062,7 +2061,8 @@ public static boolean isRawFormatFile(Path dataFile, FileSystem fs) throws IOExc */ public static void setAcidVersionInDataFile(Writer writer) { //so that we know which version wrote the file - writer.addUserMetadata(ACID_VERSION_KEY, UTF8.encode(String.valueOf(ORC_ACID_VERSION))); + writer.addUserMetadata(ACID_VERSION_KEY, + StandardCharsets.UTF_8.encode(String.valueOf(ORC_ACID_VERSION))); } /** * This is smart enough to handle streaming ingest where there could be a @@ -2080,7 +2080,8 @@ public static int getAcidVersionFromDataFile(Path dataFile, FileSystem fs) throw //make sure to check for side file in case streaming ingest died .maxLength(getLogicalLength(fs, fileStatus))); if (orcReader.hasMetadataValue(ACID_VERSION_KEY)) { - char[] versionChar = UTF8.decode(orcReader.getMetadataValue(ACID_VERSION_KEY)).array(); + char[] versionChar = StandardCharsets.UTF_8 + .decode(orcReader.getMetadataValue(ACID_VERSION_KEY)).array(); String version = new String(versionChar); return Integer.valueOf(version); } @@ -2094,7 +2095,8 @@ public static void writeVersionFile(Path deltaOrBaseDir, FileSystem fs) throws Path formatFile = getVersionFilePath(deltaOrBaseDir); if(!fs.exists(formatFile)) { try (FSDataOutputStream strm = fs.create(formatFile, false)) { - strm.write(UTF8.encode(String.valueOf(ORC_ACID_VERSION)).array()); + strm.write(StandardCharsets.UTF_8 + .encode(String.valueOf(ORC_ACID_VERSION)).array()); } catch (IOException ioe) { LOG.error("Failed to create " + formatFile + " due to: " + ioe.getMessage(), ioe); throw ioe; @@ -2116,7 +2118,7 @@ public static int getAcidVersionFromMetaFile(Path deltaOrBaseDir, FileSystem fs) byte[] bytes = new byte[1]; int read = inputStream.read(bytes); if (read != -1) { - String version = new String(bytes, UTF8); + String version = new String(bytes, StandardCharsets.UTF_8); return Integer.valueOf(version); } return ORC_ACID_VERSION_DEFAULT; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FixAcidKeyIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FixAcidKeyIndex.java index 8a7437e..f2d319e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FixAcidKeyIndex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FixAcidKeyIndex.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -62,8 +62,7 @@ public final static Logger LOG = LoggerFactory.getLogger(FixAcidKeyIndex.class); public static final String DEFAULT_BACKUP_PATH = System.getProperty("java.io.tmpdir"); - private static final Charset UTF8 = Charset.forName("UTF-8"); - private static final CharsetDecoder utf8Decoder = UTF8.newDecoder(); + private static final CharsetDecoder utf8Decoder = StandardCharsets.UTF_8.newDecoder(); public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); @@ -260,7 +259,8 @@ static void recoverFile(Configuration conf, Path inputPath, String backup) throw } // Finally add the fixed acid key index. - writer.addUserMetadata(OrcRecordUpdater.ACID_KEY_INDEX_NAME, UTF8.encode(keyIndexString)); + writer.addUserMetadata(OrcRecordUpdater.ACID_KEY_INDEX_NAME, + StandardCharsets.UTF_8.encode(keyIndexString)); } // Confirm the file is really fixed, and replace the old file. 
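Note on the two replacement forms used throughout this patch: APIs that expose a java.nio.charset.Charset overload (String#getBytes, InputStreamReader, OutputStreamWriter, commons-io FileUtils/IOUtils) take StandardCharsets.UTF_8 directly and no longer deal with the checked UnsupportedEncodingException, while APIs that on the targeted JDK only accept a charset name as a String (PrintStream and Scanner constructors, URLDecoder.decode, Thrift's TSerializer.toString) take StandardCharsets.UTF_8.name(). A minimal standalone sketch of both forms follows; the class and variable names are illustrative only and are not part of this patch.

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;

public class Utf8FormsSketch {
  public static void main(String[] args) throws UnsupportedEncodingException {
    // Charset overload: no charset-name lookup and no checked UnsupportedEncodingException.
    byte[] utf8Bytes = "hive".getBytes(StandardCharsets.UTF_8);

    // String-name overloads still require a charset name; StandardCharsets.UTF_8.name()
    // replaces the "UTF-8" literal, but the checked exception stays declared.
    PrintStream ps = new PrintStream(new ByteArrayOutputStream(), true,
        StandardCharsets.UTF_8.name());
    String decoded = URLDecoder.decode("a%20b", StandardCharsets.UTF_8.name());

    ps.println(decoded + " (" + utf8Bytes.length + " bytes)");
    ps.close();
  }
}

Using the constant's name() keeps StandardCharsets as the single source of truth for the encoding while still satisfying String-only signatures.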
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java index 6d4578e..7ac4aa1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -91,8 +91,8 @@ final static int DELTA_BUFFER_SIZE = 16 * 1024; final static long DELTA_STRIPE_SIZE = 16 * 1024 * 1024; - private static final Charset UTF8 = Charset.forName("UTF-8"); - private static final CharsetDecoder utf8Decoder = UTF8.newDecoder(); + private static final CharsetDecoder utf8Decoder = + StandardCharsets.UTF_8.newDecoder(); private final AcidOutputFormat.Options options; private final AcidUtils.AcidOperationalProperties acidOperationalProperties; @@ -688,9 +688,9 @@ public void preFooterWrite(OrcFile.WriterContext context preStripeWrite(context); } context.getWriter().addUserMetadata(ACID_KEY_INDEX_NAME, - UTF8.encode(lastKey.toString())); + StandardCharsets.UTF_8.encode(lastKey.toString())); context.getWriter().addUserMetadata(OrcAcidUtils.ACID_STATS, - UTF8.encode(acidStats.serialize())); + StandardCharsets.UTF_8.encode(acidStats.serialize())); } void addKey(int op, long transaction, int bucket, long rowId) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java index 113a6e3..5d236d2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java @@ -26,6 +26,7 @@ import java.lang.reflect.Method; import java.net.URL; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.text.MessageFormat; import java.util.Enumeration; import java.util.HashMap; @@ -178,7 +179,7 @@ private static String findContainingJar(Class my_class, Map pa // either unencoded or encoded as "%20"). Replace +s first, so // that they are kept sacred during the decoding process. 
toReturn = toReturn.replaceAll("\\+", "%2B"); - toReturn = URLDecoder.decode(toReturn, "UTF-8"); + toReturn = URLDecoder.decode(toReturn, StandardCharsets.UTF_8.name()); return toReturn.replaceAll("!.*$", ""); } } @@ -245,9 +246,9 @@ public static String jarFinderGetJar(Class klass) { if (path.startsWith("file:")) { path = path.substring("file:".length()); } - path = URLDecoder.decode(path, "UTF-8"); + path = URLDecoder.decode(path, StandardCharsets.UTF_8.name()); if ("jar".equals(url.getProtocol())) { - path = URLDecoder.decode(path, "UTF-8"); + path = URLDecoder.decode(path, StandardCharsets.UTF_8.name()); return path.replaceAll("!.*$", ""); } else if ("file".equals(url.getProtocol())) { String klassName = klass.getName(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java index 2ff1d94..3cc3d4b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java @@ -21,7 +21,7 @@ import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -99,13 +99,14 @@ public void error(OutputStream out, String errorMessage, int errorCode, String s throws HiveException { try { - out.write(errorMessage.getBytes("UTF-8")); + out.write(errorMessage.getBytes(StandardCharsets.UTF_8)); if(errorDetail != null) { - out.write(errorDetail.getBytes("UTF-8")); + out.write(errorDetail.getBytes(StandardCharsets.UTF_8)); } out.write(errorCode); if(sqlState != null) { - out.write(sqlState.getBytes("UTF-8"));//this breaks all the tests in .q files + //this breaks all the tests in .q files + out.write(sqlState.getBytes(StandardCharsets.UTF_8)); } out.write(terminator); } catch (Exception e) { @@ -124,7 +125,7 @@ public void showTables(DataOutputStream out, Set tables) try { while (iterTbls.hasNext()) { // create a row per table name - out.write(iterTbls.next().getBytes("UTF-8")); + out.write(iterTbls.next().getBytes(StandardCharsets.UTF_8)); out.write(terminator); } } catch (IOException e) { @@ -172,7 +173,8 @@ public void showMaterializedViews(DataOutputStream out, List materialized } // In case the query is served by HiveServer2, don't pad it with spaces, // as HiveServer2 output is consumed by JDBC/ODBC clients. 
- out.write(mdt.renderTable(!SessionState.get().isHiveServerQuery()).getBytes("UTF-8")); + out.write(mdt.renderTable(!SessionState.get().isHiveServerQuery()) + .getBytes(StandardCharsets.UTF_8)); out.write(terminator); } catch (IOException e) { throw new HiveException(e); @@ -230,7 +232,7 @@ public void describeTable(DataOutputStream outStream, String colPath, output = output.concat(str.toString()); } } - outStream.write(output.getBytes("UTF-8")); + outStream.write(output.getBytes(StandardCharsets.UTF_8)); if (tableName.equals(colPath)) { if (isFormatted) { @@ -239,7 +241,7 @@ public void describeTable(DataOutputStream outStream, String colPath, } else { output = MetaDataFormatUtils.getTableInformation(tbl, isOutputPadded); } - outStream.write(output.getBytes("UTF-8")); + outStream.write(output.getBytes(StandardCharsets.UTF_8)); if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) || (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) || @@ -248,7 +250,7 @@ public void describeTable(DataOutputStream outStream, String colPath, cInfo != null && !cInfo.getCheckConstraints().isEmpty() || dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { output = MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo); - outStream.write(output.getBytes("UTF-8")); + outStream.write(output.getBytes(StandardCharsets.UTF_8)); } } @@ -258,18 +260,21 @@ public void describeTable(DataOutputStream outStream, String colPath, outStream.write(terminator); if (part != null) { // show partition information - outStream.write(("Detailed Partition Information").getBytes("UTF-8")); + outStream.write(("Detailed Partition Information") + .getBytes(StandardCharsets.UTF_8)); outStream.write(separator); - outStream.write(part.getTPartition().toString().getBytes("UTF-8")); + outStream.write(part.getTPartition().toString() + .getBytes(StandardCharsets.UTF_8)); outStream.write(separator); // comment column is empty outStream.write(terminator); } else { // show table information - outStream.write(("Detailed Table Information").getBytes("UTF-8")); + outStream.write(("Detailed Table Information") + .getBytes(StandardCharsets.UTF_8)); outStream.write(separator); String tableDesc = HiveStringUtils.escapeJava(tbl.getTTable().toString()); - outStream.write(tableDesc.getBytes("UTF-8")); + outStream.write(tableDesc.getBytes(StandardCharsets.UTF_8)); outStream.write(separator); outStream.write(terminator); } @@ -279,38 +284,38 @@ public void describeTable(DataOutputStream outStream, String colPath, (dInfo!= null && !dInfo.getDefaultConstraints().isEmpty()) || (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) || (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty())) { - outStream.write(("Constraints").getBytes("UTF-8")); + outStream.write(("Constraints").getBytes(StandardCharsets.UTF_8)); outStream.write(separator); if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { - outStream.write(pkInfo.toString().getBytes("UTF-8")); + outStream.write(pkInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { - outStream.write(fkInfo.toString().getBytes("UTF-8")); + outStream.write(fkInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) { - outStream.write(ukInfo.toString().getBytes("UTF-8")); + outStream.write(ukInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } if (nnInfo != null 
&& !nnInfo.getNotNullConstraints().isEmpty()) { - outStream.write(nnInfo.toString().getBytes("UTF-8")); + outStream.write(nnInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) { - outStream.write(dInfo.toString().getBytes("UTF-8")); + outStream.write(dInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) { - outStream.write(cInfo.toString().getBytes("UTF-8")); + outStream.write(cInfo.toString().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } } if (storageHandlerInfo!= null) { - outStream.write(("StorageHandlerInfo").getBytes("UTF-8")); + outStream.write(("StorageHandlerInfo").getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(storageHandlerInfo.formatAsText().getBytes("UTF-8")); + outStream.write(storageHandlerInfo.formatAsText().getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } } @@ -364,21 +369,21 @@ public void showTableStatus(DataOutputStream outStream, "partition_columns", tbl.getPartCols()); } - outStream.write(("tableName:" + tableName).getBytes("UTF-8")); + outStream.write(("tableName:" + tableName).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("owner:" + owner).getBytes("UTF-8")); + outStream.write(("owner:" + owner).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("location:" + tblLoc).getBytes("UTF-8")); + outStream.write(("location:" + tblLoc).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("inputformat:" + inputFormattCls).getBytes("UTF-8")); + outStream.write(("inputformat:" + inputFormattCls).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("outputformat:" + outputFormattCls).getBytes("UTF-8")); + outStream.write(("outputformat:" + outputFormattCls).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("columns:" + ddlCols).getBytes("UTF-8")); + outStream.write(("columns:" + ddlCols).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("partitioned:" + isPartitioned).getBytes("UTF-8")); + outStream.write(("partitioned:" + isPartitioned).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); - outStream.write(("partitionColumns:" + partitionCols).getBytes("UTF-8")); + outStream.write(("partitionColumns:" + partitionCols).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); // output file system information Path tblPath = tbl.getPath(); @@ -463,56 +468,56 @@ private void writeFileSystemStats(DataOutputStream outStream, String unknownString = "unknown"; for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("totalNumberFiles:".getBytes("UTF-8")); - outStream.write((unknown ? unknownString : "" + fd.numOfFiles).getBytes("UTF-8")); + outStream.write("totalNumberFiles:".getBytes(StandardCharsets.UTF_8)); + outStream.write((unknown ? unknownString : "" + fd.numOfFiles).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); if (fd.numOfErasureCodedFiles > 0) { - outStream.write("totalNumberErasureCodedFiles:".getBytes("UTF-8")); - outStream.write((unknown ? 
unknownString : "" + fd.numOfErasureCodedFiles).getBytes("UTF-8")); + outStream.write("totalNumberErasureCodedFiles:".getBytes(StandardCharsets.UTF_8)); + outStream.write((unknown ? unknownString : "" + fd.numOfErasureCodedFiles).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("totalFileSize:".getBytes("UTF-8")); - outStream.write((unknown ? unknownString : "" + fd.totalFileSize).getBytes("UTF-8")); + outStream.write("totalFileSize:".getBytes(StandardCharsets.UTF_8)); + outStream.write((unknown ? unknownString : "" + fd.totalFileSize).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("maxFileSize:".getBytes("UTF-8")); - outStream.write((unknown ? unknownString : "" + fd.maxFileSize).getBytes("UTF-8")); + outStream.write("maxFileSize:".getBytes(StandardCharsets.UTF_8)); + outStream.write((unknown ? unknownString : "" + fd.maxFileSize).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("minFileSize:".getBytes("UTF-8")); + outStream.write("minFileSize:".getBytes(StandardCharsets.UTF_8)); if (fd.numOfFiles > 0) { - outStream.write((unknown ? unknownString : "" + fd.minFileSize).getBytes("UTF-8")); + outStream.write((unknown ? unknownString : "" + fd.minFileSize).getBytes(StandardCharsets.UTF_8)); } else { - outStream.write((unknown ? unknownString : "" + 0).getBytes("UTF-8")); + outStream.write((unknown ? unknownString : "" + 0).getBytes(StandardCharsets.UTF_8)); } outStream.write(terminator); for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("lastAccessTime:".getBytes("UTF-8")); + outStream.write("lastAccessTime:".getBytes(StandardCharsets.UTF_8)); outStream.writeBytes((unknown || fd.lastAccessTime < 0) ? unknownString : "" + fd.lastAccessTime); outStream.write(terminator); for (int k = 0; k < indent; k++) { - outStream.write(Utilities.INDENT.getBytes("UTF-8")); + outStream.write(Utilities.INDENT.getBytes(StandardCharsets.UTF_8)); } - outStream.write("lastUpdateTime:".getBytes("UTF-8")); - outStream.write((unknown ? unknownString : "" + fd.lastUpdateTime).getBytes("UTF-8")); + outStream.write("lastUpdateTime:".getBytes(StandardCharsets.UTF_8)); + outStream.write((unknown ? 
unknownString : "" + fd.lastUpdateTime).getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } @@ -568,9 +573,9 @@ public void showTablePartitions(DataOutputStream outStream, List parts) SessionState ss = SessionState.get(); if (ss != null && ss.getConf() != null && !ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME)) { - outStream.write(part.getBytes("UTF-8")); + outStream.write(part.getBytes(StandardCharsets.UTF_8)); } else { - outStream.write(FileUtils.unescapePathName(part).getBytes("UTF-8")); + outStream.write(FileUtils.unescapePathName(part).getBytes(StandardCharsets.UTF_8)); } outStream.write(terminator); } @@ -589,7 +594,7 @@ public void showDatabases(DataOutputStream outStream, List databases) try { for (String database : databases) { // create a row per database name - outStream.write(database.getBytes("UTF-8")); + outStream.write(database.getBytes(StandardCharsets.UTF_8)); outStream.write(terminator); } } catch (IOException e) { @@ -605,26 +610,26 @@ public void showDatabaseDescription(DataOutputStream outStream, String database, String location, String ownerName, String ownerType, Map params) throws HiveException { try { - outStream.write(database.getBytes("UTF-8")); + outStream.write(database.getBytes(StandardCharsets.UTF_8)); outStream.write(separator); if (comment != null) { - outStream.write(HiveStringUtils.escapeJava(comment).getBytes("UTF-8")); + outStream.write(HiveStringUtils.escapeJava(comment).getBytes(StandardCharsets.UTF_8)); } outStream.write(separator); if (location != null) { - outStream.write(location.getBytes("UTF-8")); + outStream.write(location.getBytes(StandardCharsets.UTF_8)); } outStream.write(separator); if (ownerName != null) { - outStream.write(ownerName.getBytes("UTF-8")); + outStream.write(ownerName.getBytes(StandardCharsets.UTF_8)); } outStream.write(separator); if (ownerType != null) { - outStream.write(ownerType.getBytes("UTF-8")); + outStream.write(ownerType.getBytes(StandardCharsets.UTF_8)); } outStream.write(separator); if (params != null && !params.isEmpty()) { - outStream.write(params.toString().getBytes("UTF-8")); + outStream.write(params.toString().getBytes(StandardCharsets.UTF_8)); } outStream.write(terminator); } catch (IOException e) { @@ -632,24 +637,22 @@ public void showDatabaseDescription(DataOutputStream outStream, String database, } } - private static final Charset UTF_8 = Charset.forName("UTF-8"); - public void showResourcePlans(DataOutputStream out, List resourcePlans) throws HiveException { try { for (WMResourcePlan plan : resourcePlans) { - out.write(plan.getName().getBytes(UTF_8)); + out.write(plan.getName().getBytes(StandardCharsets.UTF_8)); out.write(separator); - out.write(plan.getStatus().name().getBytes(UTF_8)); + out.write(plan.getStatus().name().getBytes(StandardCharsets.UTF_8)); out.write(separator); if (plan.isSetQueryParallelism()) { - out.write(Integer.toString(plan.getQueryParallelism()).getBytes(UTF_8)); + out.write(Integer.toString(plan.getQueryParallelism()).getBytes(StandardCharsets.UTF_8)); } else { write(out, "null"); } out.write(separator); if (plan.isSetDefaultPoolPath()) { - out.write(plan.getDefaultPoolPath().getBytes(UTF_8)); + out.write(plan.getDefaultPoolPath().getBytes(StandardCharsets.UTF_8)); } else { write(out, "null"); } @@ -737,14 +740,14 @@ private void writeFields(Object ... 
kvPairs) return; } out.write('['); - out.write(kvPairs[0].toString().getBytes(UTF_8)); + out.write(kvPairs[0].toString().getBytes(StandardCharsets.UTF_8)); out.write('='); - out.write((kvPairs[1] == null ? "null" : kvPairs[1].toString()).getBytes(UTF_8)); + out.write((kvPairs[1] == null ? "null" : kvPairs[1].toString()).getBytes(StandardCharsets.UTF_8)); for (int i = 2; i < kvPairs.length; i += 2) { out.write(','); - out.write(kvPairs[i].toString().getBytes(UTF_8)); + out.write(kvPairs[i].toString().getBytes(StandardCharsets.UTF_8)); out.write('='); - out.write((kvPairs[i + 1] == null ? "null" : kvPairs[i + 1].toString()).getBytes(UTF_8)); + out.write((kvPairs[i + 1] == null ? "null" : kvPairs[i + 1].toString()).getBytes(StandardCharsets.UTF_8)); } out.write(']'); } @@ -768,7 +771,7 @@ public void formatMappingType(String type, List names) throws IOExceptio final int maxList = 5; writeIndent(false); write(out, "mapped for "); - out.write(type.toLowerCase().getBytes(UTF_8)); + out.write(type.toLowerCase().getBytes(StandardCharsets.UTF_8)); if (!names.isEmpty()) { write(out, "s: "); int count = Math.min(maxList, names.size()); @@ -776,11 +779,11 @@ public void formatMappingType(String type, List names) throws IOExceptio if (i != 0) { write(out, ", "); } - out.write(names.get(i).getBytes(UTF_8)); + out.write(names.get(i).getBytes(StandardCharsets.UTF_8)); } int remaining = names.size() - count; if (remaining > 0) { - out.write((" and " + remaining + " others").getBytes(UTF_8)); + out.write((" and " + remaining + " others").getBytes(StandardCharsets.UTF_8)); } } out.write(terminator); @@ -805,7 +808,7 @@ public void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan fullRe } private static byte[] str(String str) { - return str.getBytes(UTF_8); + return str.getBytes(StandardCharsets.UTF_8); } private static void write(DataOutputStream out, String val) throws IOException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index 156f755..9b18009 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -49,6 +49,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.nio.charset.StandardCharsets; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.Serializable; @@ -326,7 +328,7 @@ public static String readAsString(final FileSystem fs, final Path fromMetadataPa sb.write(buffer, 0, read); read = stream.read(buffer); } - return new String(sb.toByteArray(), "UTF-8"); + return new String(sb.toByteArray(), StandardCharsets.UTF_8); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java index 15b7e13..e68645a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java @@ -26,6 +26,7 @@ import org.apache.thrift.protocol.TJSONProtocol; import java.io.IOException; +import java.nio.charset.StandardCharsets; public class DBSerializer implements JsonWriter.Serializer { public static final String FIELD_NAME = "db"; @@ -44,7 +45,7 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi ); TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); try { - String value = serializer.toString(dbObject, UTF_8); + String value = 
serializer.toString(dbObject, StandardCharsets.UTF_8.name()); writer.jsonGenerator.writeStringField(FIELD_NAME, value); } catch (TException e) { throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java index b68e887..0d0190c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java @@ -32,6 +32,7 @@ import org.apache.thrift.protocol.TJSONProtocol; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; @@ -74,8 +75,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi "all"); writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.CURR_STATE_ID.toString(), additionalPropertiesProvider.getCurrentReplicationState()); - writer.jsonGenerator - .writeStringField(FIELD_NAME, serializer.toString(copyObj, UTF_8)); + writer.jsonGenerator.writeStringField(FIELD_NAME, + serializer.toString(copyObj, StandardCharsets.UTF_8.name())); } catch (TException e) { throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java index e20be68..f8b474f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/JsonWriter.java @@ -48,7 +48,6 @@ public void close() throws IOException { } public interface Serializer { - String UTF_8 = "UTF-8"; void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java index ecd4c84..521a5e8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java @@ -27,6 +27,7 @@ import org.apache.thrift.protocol.TJSONProtocol; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Map; public class PartitionSerializer implements JsonWriter.Serializer { @@ -59,7 +60,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi additionalPropertiesProvider.getCurrentReplicationState()); } } - writer.jsonGenerator.writeString(serializer.toString(partition, UTF_8)); + writer.jsonGenerator.writeString( + serializer.toString(partition, StandardCharsets.UTF_8.name())); writer.jsonGenerator.flush(); } catch (TException e) { throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java index dac20d2..0832a22 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java @@ -32,6 +32,7 @@ import org.slf4j.LoggerFactory; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Map; public class 
TableSerializer implements JsonWriter.Serializer { @@ -62,7 +63,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi try { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); writer.jsonGenerator - .writeStringField(FIELD_NAME, serializer.toString(tTable, UTF_8)); + .writeStringField(FIELD_NAME, + serializer.toString(tTable, StandardCharsets.UTF_8.name())); writer.jsonGenerator.writeFieldName(PartitionSerializer.FIELD_NAME); writePartitions(writer, additionalPropertiesProvider); } catch (TException e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/MetadataJson.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/MetadataJson.java index b04fdef..0cc7ba5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/MetadataJson.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/MetadataJson.java @@ -36,11 +36,10 @@ import org.json.JSONException; import org.json.JSONObject; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; -import static org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter.Serializer.UTF_8; - public class MetadataJson { private final JSONObject json; private final TDeserializer deserializer; @@ -79,7 +78,7 @@ private Table table() throws TException { if (json == null) { return null; } - deserializer.deserialize(intoObject, json, UTF_8); + deserializer.deserialize(intoObject, json, StandardCharsets.UTF_8.name()); return intoObject; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java index 7b96b33..f6c14b6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java @@ -22,7 +22,7 @@ import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Map; import java.util.StringTokenizer; @@ -228,7 +228,7 @@ CommandProcessorResponse compile(SessionState ss) throws CompileProcessorExcepti File fileToWrite = new File(input, this.named); try { - Files.write(this.code, fileToWrite, Charset.forName("UTF-8")); + Files.write(this.code, fileToWrite, StandardCharsets.UTF_8); } catch (IOException e1) { throw new CompileProcessorException("writing file", e1); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatch.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatch.java index 9b132c4..44dee54 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatch.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatch.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector; +import java.nio.charset.StandardCharsets; import java.util.Random; import junit.framework.Assert; @@ -263,12 +264,7 @@ private void verifyFlatten(ColumnVector v) { Assert.assertTrue(((DoubleColumnVector) v).vector[2] == 200d); } else if (v instanceof BytesColumnVector) { BytesColumnVector bv = (BytesColumnVector) v; - byte[] b = null; - try { - b = "foo".getBytes("UTF-8"); - } catch (Exception e) { - ; // eat it - } + byte[] b = "foo".getBytes(StandardCharsets.UTF_8); bv.setRef(0, b, 0, b.length); bv.flatten(true, sel, 2); Assert.assertEquals(bv.vector[0], bv.vector[2]); diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestCuckooSet.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestCuckooSet.java index a0ad875..a752efc 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestCuckooSet.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestCuckooSet.java @@ -20,6 +20,7 @@ import static org.junit.Assert.*; +import java.nio.charset.StandardCharsets; import java.util.Random; import org.junit.Test; @@ -191,12 +192,7 @@ public void loadRandomBytes(byte[][] values, Random gen) { } private byte[] getUTF8Bytes(String s) { - byte[] v = null; - try { - v = s.getBytes("UTF-8"); - } catch (Exception e) { - ; // won't happen - } + byte[] v = s.getBytes(StandardCharsets.UTF_8); return v; } @@ -204,11 +200,7 @@ public void loadRandomBytes(byte[][] values, Random gen) { private byte[][] getByteArrays(String[] strings) { byte[][] values = new byte[strings.length][]; for(int i = 0; i != strings.length; i++) { - try { - values[i] = strings[i].getBytes("UTF-8"); - } catch (Exception e) { - ; // can't happen - } + values[i] = strings[i].getBytes(StandardCharsets.UTF_8); } return values; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java index d02ae02..f11c0e8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java @@ -20,6 +20,8 @@ import static org.junit.Assert.*; +import java.nio.charset.StandardCharsets; + import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; @@ -155,22 +157,13 @@ private void setString(BytesColumnVector v, int i, String s) { } private byte[] getUTF8Bytes(String s) { - byte[] b = null; - try { - b = s.getBytes("UTF-8"); - } catch (Exception e) { - ; // eat it - } + byte[] b = s.getBytes(StandardCharsets.UTF_8); return b; } private String getString(BytesColumnVector v, int i) { - String s = null; - try { - s = new String(v.vector[i], v.start[i], v.length[i], "UTF-8"); - } catch (Exception e) { - ; // eat it - } + String s = new String(v.vector[i], v.start[i], v.length[i], + StandardCharsets.UTF_8); return s; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java index b5ad22c..790ce56 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java @@ -44,6 +44,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Calendar; import java.util.List; @@ -456,7 +457,9 @@ public Void call() throws Exception { batch.cols[0] = in; batch.cols[1] = out; for (int i = 0; i < batchSize; i++) { - byte[] data = String.format("1999-%02d-%02d", 1 + (i % 12), 1 + (i % 15)).getBytes("UTF-8"); + byte[] data = + String.format("1999-%02d-%02d", 1 + (i % 12), 1 + (i % 15)) + .getBytes(StandardCharsets.UTF_8); in.setRef(i, data, 0, data.length); in.isNull[i] = 
false; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java index 5f4d138..0eb6a67 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -504,13 +505,9 @@ public void testFilterDoubleNotBetween() throws HiveException { static byte[] c = null; static { - try { - a = "a".getBytes("UTF-8"); - b = "b".getBytes("UTF-8"); - c = "c".getBytes("UTF-8"); - } catch (Exception e) { - ; // won't happen - } + a = "a".getBytes(StandardCharsets.UTF_8); + b = "b".getBytes(StandardCharsets.UTF_8); + c = "c".getBytes(StandardCharsets.UTF_8); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java index e9be8c1..53e04d5 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import java.util.Random; import java.util.concurrent.TimeUnit; @@ -193,13 +194,7 @@ public void testCastTimestampToDouble() throws HiveException { } public byte[] toBytes(String s) { - byte[] b = null; - try { - b = s.getBytes("UTF-8"); - } catch (Exception e) { - throw new RuntimeException("Could not convert string to UTF-8 byte array."); - } - return b; + return s.getBytes(StandardCharsets.UTF_8); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java index 46834d8..e2c1cac 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/udf/TestVectorUDFAdaptor.java @@ -20,6 +20,7 @@ import static org.junit.Assert.*; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -52,12 +53,8 @@ static byte[] red = null; static { - try { - blue = "blue".getBytes("UTF-8"); - red = "red".getBytes("UTF-8"); - } catch (Exception e) { - ; // do nothing - } + blue = "blue".getBytes(StandardCharsets.UTF_8); + red = "red".getBytes(StandardCharsets.UTF_8); } @Test @@ -160,14 +157,9 @@ public void testMultiArgumentUDF() throws HiveException { // with no nulls VectorizedRowBatch b = getBatchStrDblLongWithStrOut(); vudf.evaluate(b); - byte[] result = null; - byte[] result2 = null; - try { - result = "red:1:1.0".getBytes("UTF-8"); - result2 = "blue:0:0.0".getBytes("UTF-8"); - } catch (Exception e) { - ; - } + byte[] result = "red:1:1.0".getBytes(StandardCharsets.UTF_8); + byte[] result2 = "blue:0:0.0".getBytes(StandardCharsets.UTF_8); + BytesColumnVector out = (BytesColumnVector) b.cols[3]; int cmp = StringExpr.compare(result, 0, result.length, out.vector[1], out.start[1], 
out.length[1]); @@ -277,14 +269,8 @@ public void testGenericUDF() throws HiveException { VectorizedRowBatch b; - byte[] red = null; - byte[] unknown = null; - try { - red = "red".getBytes("UTF-8"); - unknown = "UNKNOWN".getBytes("UTF-8"); - } catch (Exception e) { - ; - } + byte[] red = "red".getBytes(StandardCharsets.UTF_8); + byte[] unknown = "UNKNOWN".getBytes(StandardCharsets.UTF_8); BytesColumnVector out; // with nulls diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java index 38d64aa..06ecac3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java @@ -21,6 +21,7 @@ import java.io.DataOutput; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Random; import org.junit.Test; @@ -173,7 +174,7 @@ private static void writeJunk(DataOutput out, Random r, long seed, int iter) out.writeUTF(string2); break; case 12: - byte[] bb = asciiString.getBytes("UTF-8"); + byte[] bb = asciiString.getBytes(StandardCharsets.UTF_8); out.write(bb); break; case 13: diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index e0dfeab..31c3a74 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -65,6 +65,7 @@ import org.mockito.Mockito; import com.google.common.collect.Lists; +import java.nio.charset.StandardCharsets; import java.io.File; import java.io.IOException; @@ -440,7 +441,7 @@ public void testNewBase() throws Exception { .thenReturn(true); Mockito.when(reader.getMetadataValue(OrcRecordUpdater.ACID_KEY_INDEX_NAME)) .thenReturn(ByteBuffer.wrap("10,20,30;40,50,60;40,50,61" - .getBytes("UTF-8"))); + .getBytes(StandardCharsets.UTF_8))); Mockito.when(reader.getStripes()) .thenReturn(createStripes(2, 2, 1)); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java index 06f27b5..52cde2d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java @@ -13,6 +13,7 @@ */ package org.apache.hadoop.hive.ql.io.parquet; +import java.nio.charset.StandardCharsets; import java.util.Properties; import junit.framework.TestCase; @@ -57,12 +58,12 @@ public void testParquetHiveSerDe() throws Throwable { arr[2] = new IntWritable(789); arr[3] = new LongWritable(1000l); arr[4] = new DoubleWritable((double) 5.3); - arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8")); - arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8")); + arr[5] = new BytesWritable("hive and hadoop and parquet. 
Big family.".getBytes(StandardCharsets.UTF_8)); + arr[6] = new BytesWritable("parquetSerde binary".getBytes(StandardCharsets.UTF_8)); final Writable[] map = new Writable[3]; for (int i = 0; i < 3; ++i) { final Writable[] pair = new Writable[2]; - pair[0] = new BytesWritable(("key_" + i).getBytes("UTF-8")); + pair[0] = new BytesWritable(("key_" + i).getBytes(StandardCharsets.UTF_8)); pair[1] = new IntWritable(i); map[i] = new ArrayWritable(Writable.class, pair); } @@ -70,7 +71,7 @@ public void testParquetHiveSerDe() throws Throwable { final Writable[] array = new Writable[5]; for (int i = 0; i < 5; ++i) { - array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8")); + array[i] = new BytesWritable(("elem_" + i).getBytes(StandardCharsets.UTF_8)); } arr[8] = new ArrayWritable(Writable.class, array); @@ -99,7 +100,7 @@ public void testParquetHiveSerDeComplexTypes() throws Throwable { // Generate test data Writable[] wb = new Writable[1]; - wb[0] = new BytesWritable("foo".getBytes("UTF-8")); + wb[0] = new BytesWritable("foo".getBytes(StandardCharsets.UTF_8)); Writable[] ws = new Writable[2]; ws[0] = null; ArrayWritable awb = new ArrayWritable(Writable.class, wb); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/TestSlidingFilenameRolloverStrategy.java b/ql/src/test/org/apache/hadoop/hive/ql/log/TestSlidingFilenameRolloverStrategy.java index a63d01a..e15cb04 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/log/TestSlidingFilenameRolloverStrategy.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/log/TestSlidingFilenameRolloverStrategy.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.log; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystems; @@ -113,7 +114,7 @@ public void testSlidingLogFiles() throws Exception { int count = 0; for (Path path : stream) { count++; - String contents = new String(Files.readAllBytes(path), "UTF-8"); + String contents = new String(Files.readAllBytes(path), StandardCharsets.UTF_8); // There should be one exception message per file assertTrue("File " + path + " did not have expected content", contents.contains(errorString)); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java index bc4e8d8..6cc1ed3 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.serde2; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Properties; import org.apache.hadoop.conf.Configuration; @@ -28,7 +29,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Charsets; /** * AbstractEncodingAwareSerDe aware the encoding from table properties, @@ -43,8 +43,8 @@ @Deprecated public void initialize(Configuration conf, Properties tbl) throws SerDeException { - charset = Charset.forName(tbl.getProperty(serdeConstants.SERIALIZATION_ENCODING, "UTF-8")); - if (this.charset.equals(Charsets.ISO_8859_1) || this.charset.equals(Charsets.US_ASCII)) { + charset = Charset.forName(tbl.getProperty(serdeConstants.SERIALIZATION_ENCODING, StandardCharsets.UTF_8.name())); + if (this.charset.equals(StandardCharsets.ISO_8859_1) || this.charset.equals(StandardCharsets.US_ASCII)) { LOG.warn("The data may not be 
properly converted to target charset " + charset); } } @@ -53,7 +53,7 @@ public void initialize(Configuration conf, Properties tbl) public final Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException { Writable result = doSerialize(obj, objInspector); - if (!this.charset.equals(Charsets.UTF_8)) { + if (!this.charset.equals(StandardCharsets.UTF_8)) { result = transformFromUTF8(result); } return result; @@ -70,7 +70,7 @@ public final Writable serialize(Object obj, ObjectInspector objInspector) @Override public final Object deserialize(Writable blob) throws SerDeException { - if (!this.charset.equals(Charsets.UTF_8)) { + if (!this.charset.equals(StandardCharsets.UTF_8)) { blob = transformToUTF8(blob); } return doDeserialize(blob); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestBytesRefArrayWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestBytesRefArrayWritable.java index df8d314..5352632 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestBytesRefArrayWritable.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestBytesRefArrayWritable.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.serde2.columnar; +import java.nio.charset.StandardCharsets; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -28,15 +30,15 @@ @Before public void setup() throws Exception { - left.set(0, new BytesRefWritable("123".getBytes("UTF-8"))); - left.set(1, new BytesRefWritable("456".getBytes("UTF-8"))); - left.set(2, new BytesRefWritable("789".getBytes("UTF-8"))); - left.set(3, new BytesRefWritable("1000".getBytes("UTF-8"))); - - right.set(0, new BytesRefWritable("123".getBytes("UTF-8"))); - right.set(1, new BytesRefWritable("456".getBytes("UTF-8"))); - right.set(2, new BytesRefWritable("289".getBytes("UTF-8"))); - right.set(3, new BytesRefWritable("1000".getBytes("UTF-8"))); + left.set(0, new BytesRefWritable("123".getBytes(StandardCharsets.UTF_8))); + left.set(1, new BytesRefWritable("456".getBytes(StandardCharsets.UTF_8))); + left.set(2, new BytesRefWritable("789".getBytes(StandardCharsets.UTF_8))); + left.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8))); + + right.set(0, new BytesRefWritable("123".getBytes(StandardCharsets.UTF_8))); + right.set(1, new BytesRefWritable("456".getBytes(StandardCharsets.UTF_8))); + right.set(2, new BytesRefWritable("289".getBytes(StandardCharsets.UTF_8))); + right.set(3, new BytesRefWritable("1000".getBytes(StandardCharsets.UTF_8))); } @Test // HIVE-5839 diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java index a7873f2..10e03c5 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/fast/TestLazySimpleDeserializeRead.java @@ -19,6 +19,7 @@ import junit.framework.TestCase; +import java.nio.charset.StandardCharsets; import java.util.Properties; import org.apache.hadoop.hive.conf.HiveConf; @@ -61,7 +62,7 @@ public void testEscaping() throws Exception { // set and parse the row String s = "This\\nis\\rthe first\\r\\nmulti-line field\\n|field1-2"; - Text row = new Text(s.getBytes("UTF-8")); + Text row = new Text(s.getBytes(StandardCharsets.UTF_8)); deserializeRead.set(row.getBytes(), 0, row.getLength()); assertTrue(deserializeRead.readNextField()); @@ -77,7 +78,7 @@ public void 
testEscaping() throws Exception { field.set(externalBuffer, 0, externalBufferLen); String f = "This\nis\rthe first\r\nmulti-line field\n"; - Text escaped = new Text(f.getBytes("UTF-8")); + Text escaped = new Text(f.getBytes(StandardCharsets.UTF_8)); assertTrue("The escaped result is incorrect", field.compareTo(escaped) == 0); } diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index 747db58..ae40807 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -501,7 +501,7 @@ public String getTaskStatus() throws HiveSQLException { ObjectMapper mapper = new ObjectMapper(); out = new ByteArrayOutputStream(); mapper.writeValue(out, statuses); - return out.toString("UTF-8"); + return out.toString(StandardCharsets.UTF_8.name()); } catch (JsonGenerationException e) { throw new HiveSQLException(e); } catch (JsonMappingException e) { diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index fa61d3c..e6f9534 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -22,6 +22,7 @@ import java.io.ByteArrayInputStream; import java.io.Serializable; +import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -663,7 +664,7 @@ public void testTaskStatus() throws Exception { String jsonTaskStatus = status.getTaskStatus(); assertNotNull(jsonTaskStatus); ObjectMapper mapper = new ObjectMapper(); - ByteArrayInputStream in = new ByteArrayInputStream(jsonTaskStatus.getBytes("UTF-8")); + ByteArrayInputStream in = new ByteArrayInputStream(jsonTaskStatus.getBytes(StandardCharsets.UTF_8)); List taskStatuses = mapper.readValue(in, new TypeReference>(){}); System.out.println("task statuses: " + jsonTaskStatus); // TaskDisplay doesn't have a toString, using json diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java index 402e737..4010a27 100644 --- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java +++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java @@ -24,6 +24,7 @@ import java.io.StringWriter; import java.net.HttpURLConnection; import java.net.URL; +import java.nio.charset.StandardCharsets; import org.apache.commons.io.IOUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -166,7 +167,7 @@ private String getURLResponseAsString(String baseURL) throws IOException { HttpURLConnection conn = (HttpURLConnection) url.openConnection(); Assert.assertEquals("Got an HTTP response code other thank OK.", HttpURLConnection.HTTP_OK, conn.getResponseCode()); StringWriter writer = new StringWriter(); - IOUtils.copy(conn.getInputStream(), writer, "UTF-8"); + IOUtils.copy(conn.getInputStream(), writer, StandardCharsets.UTF_8); return writer.toString(); } diff --git a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java index 0e1557e..2320fbe 100644 --- a/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java +++ b/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java @@ -37,6 
+37,7 @@ import java.io.Serializable; import java.net.MalformedURLException; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.util.Arrays; import java.util.HashMap; @@ -243,7 +244,7 @@ public void call(SparkClient client) throws Exception { JarOutputStream jarFile = new JarOutputStream(new FileOutputStream(jar)); jarFile.putNextEntry(new ZipEntry("test.resource")); - jarFile.write("test resource".getBytes("UTF-8")); + jarFile.write("test resource".getBytes(StandardCharsets.UTF_8)); jarFile.closeEntry(); jarFile.close(); @@ -260,7 +261,7 @@ public void call(SparkClient client) throws Exception { file = File.createTempFile("test", ".file"); FileOutputStream fileStream = new FileOutputStream(file); - fileStream.write("test file".getBytes("UTF-8")); + fileStream.write("test file".getBytes(StandardCharsets.UTF_8)); fileStream.close(); client.addJar(new URI("file:" + file.getAbsolutePath())) @@ -473,7 +474,7 @@ public String call(Integer i) throws Exception { InputStream in = ccl.getResourceAsStream("test.resource"); byte[] bytes = ByteStreams.toByteArray(in); in.close(); - return new String(bytes, 0, bytes.length, "UTF-8"); + return new String(bytes, 0, bytes.length, StandardCharsets.UTF_8); } } @@ -496,7 +497,7 @@ public String call(Integer i) throws Exception { InputStream in = new FileInputStream(SparkFiles.get(fileName)); byte[] bytes = ByteStreams.toByteArray(in); in.close(); - return new String(bytes, 0, bytes.length, "UTF-8"); + return new String(bytes, 0, bytes.length, StandardCharsets.UTF_8); } } diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageBuilder.java index e3a91f9..9b2d79b 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageBuilder.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageBuilder.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.metastore.messaging; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; @@ -317,34 +318,34 @@ private long now() { public static String createPrimaryKeyObjJson(SQLPrimaryKey primaryKeyObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(primaryKeyObj, "UTF-8"); + return serializer.toString(primaryKeyObj, StandardCharsets.UTF_8.name()); } public static String createForeignKeyObjJson(SQLForeignKey foreignKeyObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(foreignKeyObj, "UTF-8"); + return serializer.toString(foreignKeyObj, StandardCharsets.UTF_8.name()); } public static String createUniqueConstraintObjJson(SQLUniqueConstraint uniqueConstraintObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(uniqueConstraintObj, "UTF-8"); + return serializer.toString(uniqueConstraintObj, StandardCharsets.UTF_8.name()); } public static String createNotNullConstraintObjJson(SQLNotNullConstraint notNullConstaintObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(notNullConstaintObj, "UTF-8"); + return serializer.toString(notNullConstaintObj, 
StandardCharsets.UTF_8.name()); } public static String createDatabaseObjJson(Database dbObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(dbObj, "UTF-8"); + return serializer.toString(dbObj, StandardCharsets.UTF_8.name()); } public static String createCatalogObjJson(Catalog catObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(catObj, "UTF-8"); + return serializer.toString(catObj, StandardCharsets.UTF_8.name()); } public static String createTableObjJson(Table tableObj) throws TException { @@ -352,7 +353,7 @@ public static String createTableObjJson(Table tableObj) throws TException { // any pattern provided through EVENT_NOTIFICATION_PARAMETERS_EXCLUDE_PATTERNS filterMapkeys(tableObj.getParameters(), paramsFilter); TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(tableObj, "UTF-8"); + return serializer.toString(tableObj, StandardCharsets.UTF_8.name()); } public static String createPartitionObjJson(Partition partitionObj) throws TException { @@ -360,25 +361,25 @@ public static String createPartitionObjJson(Partition partitionObj) throws TExce // any pattern provided through EVENT_NOTIFICATION_PARAMETERS_EXCLUDE_PATTERNS filterMapkeys(partitionObj.getParameters(), paramsFilter); TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(partitionObj, "UTF-8"); + return serializer.toString(partitionObj, StandardCharsets.UTF_8.name()); } public static String createFunctionObjJson(Function functionObj) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(functionObj, "UTF-8"); + return serializer.toString(functionObj, StandardCharsets.UTF_8.name()); } public static Table getTableObj(ObjectNode jsonTree) throws Exception { TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory()); Table tableObj = new Table(); String tableJson = jsonTree.get("tableObjJson").asText(); - deSerializer.deserialize(tableObj, tableJson, "UTF-8"); + deSerializer.deserialize(tableObj, tableJson, StandardCharsets.UTF_8.name()); return tableObj; } public static String createTableColumnStatJson(ColumnStatistics tableColumnStat) throws TException { TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); - return serializer.toString(tableColumnStat, "UTF-8"); + return serializer.toString(tableColumnStat, StandardCharsets.UTF_8.name()); } /* @@ -405,7 +406,7 @@ public static TBase getTObj(String tSerialized, Class objClass) throws Exception { TDeserializer thriftDeSerializer = new TDeserializer(new TJSONProtocol.Factory()); TBase obj = objClass.newInstance(); - thriftDeSerializer.deserialize(obj, tSerialized, "UTF-8"); + thriftDeSerializer.deserialize(obj, tSerialized, StandardCharsets.UTF_8.name()); return obj; } diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java index 2fac79f..239b26e 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetastoreSchemaTool.java @@ -44,6 +44,7 @@ import java.io.OutputStream; import 
java.io.PrintStream; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.SQLException; @@ -321,7 +322,7 @@ protected void execSql(String sqlScriptFile) throws IOException { SqlLine.Status status = sqlLine.begin(builder.buildToRun(), null, false); if (LOG.isDebugEnabled() && outputForLog != null) { LOG.debug("Received following output from Sqlline:"); - LOG.debug(outputForLog.toString("UTF-8")); + LOG.debug(outputForLog.toString(StandardCharsets.UTF_8.name())); } if (status != SqlLine.Status.OK) { throw new IOException("Schema script failed, errorcode " + status); diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java index fd84169..af006e3 100644 --- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java +++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.util.concurrent.TimeUnit; import org.apache.commons.cli.CommandLine; @@ -223,7 +224,7 @@ private long printLogs(String testHandle, long offset) if(statusLine.getStatusCode() != 200) { throw new IllegalStateException(statusLine.getStatusCode() + " " + statusLine.getReasonPhrase()); } - String response = EntityUtils.toString(httpResponse.getEntity(), "UTF-8"); + String response = EntityUtils.toString(httpResponse.getEntity(), StandardCharsets.UTF_8); @SuppressWarnings("unchecked") S result = (S)endPointResponse. getResponseClass().cast(mMapper.readValue(response, endPointResponse.getResponseClass())); diff --git a/upgrade-acid/pre-upgrade/src/main/java/org/apache/hadoop/hive/upgrade/acid/PreUpgradeTool.java b/upgrade-acid/pre-upgrade/src/main/java/org/apache/hadoop/hive/upgrade/acid/PreUpgradeTool.java index 04782a6..895b7c4 100644 --- a/upgrade-acid/pre-upgrade/src/main/java/org/apache/hadoop/hive/upgrade/acid/PreUpgradeTool.java +++ b/upgrade-acid/pre-upgrade/src/main/java/org/apache/hadoop/hive/upgrade/acid/PreUpgradeTool.java @@ -65,8 +65,8 @@ import java.io.PrintWriter; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -493,8 +493,7 @@ private static long getDataSize(Path location, HiveConf conf) throws IOException } - private static final Charset utf8 = Charset.forName("UTF-8"); - private static final CharsetDecoder utf8Decoder = utf8.newDecoder(); + private static final CharsetDecoder utf8Decoder = StandardCharsets.UTF_8.newDecoder(); private static final String ACID_STATS = "hive.acid.stats"; private static boolean needsCompaction(FileStatus bucket, FileSystem fs) throws IOException {