From daae241a7ec2c551d86b2b01d4ad32f9c8d7f94d Mon Sep 17 00:00:00 2001 From: Ivan Suller Date: Thu, 7 Feb 2019 11:51:27 +0100 Subject: [PATCH] HIVE-21228: --- .../java/org/apache/hive/beeline/BeeLine.java | 4 +- .../org/apache/hive/beeline/Reflector.java | 14 +++---- .../hive/hcatalog/data/HCatRecordSerDe.java | 4 +- .../hcatalog/data/TestLazyHCatRecord.java | 16 ++++---- .../hcatalog/pig/TestHCatLoaderStorer.java | 8 ++-- .../hive/hplsql/functions/FunctionMisc.java | 38 ++++++++++++------- .../TestIncrementalObjectSizeEstimator.java | 18 ++++----- .../hadoop/hive/ql/exec/GroupByOperator.java | 4 +- .../apache/hadoop/hive/ql/exec/Operator.java | 5 ++- .../hadoop/hive/ql/exec/TaskFactory.java | 2 +- .../hive/ql/exec/mr/HadoopJobExecHelper.java | 2 +- .../ql/exec/tez/CustomPartitionVertex.java | 2 +- .../zookeeper/ZooKeeperHiveLockManager.java | 2 +- .../ql/optimizer/AbstractBucketJoinProc.java | 4 +- .../hive/ql/optimizer/GenMapRedUtils.java | 2 +- .../calcite/translator/ASTConverter.java | 4 +- .../annotation/StatsRulesProcFactory.java | 20 +++++----- .../hadoop/hive/ql/parse/CalcitePlanner.java | 6 +-- .../hive/ql/parse/SemanticAnalyzer.java | 6 +-- .../apache/hadoop/hive/ql/plan/LimitDesc.java | 2 +- .../hive/ql/processors/CryptoProcessor.java | 6 +-- .../ql/udf/generic/GenericUDTFPosExplode.java | 2 +- .../exec/vector/TestVectorizationContext.java | 22 +++++------ .../ql/exec/vector/VectorRandomRowSource.java | 4 +- .../apache/hadoop/hive/ql/io/TestRCFile.java | 3 +- .../ql/io/orc/TestNewInputOutputFormat.java | 12 +++--- .../ql/optimizer/physical/TestVectorizer.java | 4 +- .../ql/udf/generic/TestGenericUDFDateAdd.java | 8 ++-- .../ql/udf/generic/TestGenericUDFDateSub.java | 8 ++-- .../hive/serde2/SerdeRandomRowSource.java | 4 +- .../thrift_test/CreateSequenceFile.java | 3 +- .../hadoop/hive/metastore/HiveMetaStore.java | 3 +- 32 files changed, 123 insertions(+), 119 deletions(-) diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java index e54e818b69..65eee2c2bb 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -486,7 +486,7 @@ String loc(String res, int param) { try { return MessageFormat.format( new ChoiceFormat(resourceBundle.getString(res)).format(param), - new Object[] {new Integer(param)}); + new Object[] {Integer.valueOf(param)}); } catch (Exception e) { return res + ": " + param; } @@ -2176,7 +2176,7 @@ void handleSQLException(SQLException e) { new Object[] { e.getMessage() == null ? "" : e.getMessage().trim(), e.getSQLState() == null ? 
"" : e.getSQLState().trim(), - new Integer(e.getErrorCode())})); + Integer.valueOf(e.getErrorCode())})); if (getOpts().getVerbose()) { e.printStackTrace(getErrorStream()); diff --git a/beeline/src/java/org/apache/hive/beeline/Reflector.java b/beeline/src/java/org/apache/hive/beeline/Reflector.java index 455c11c930..1434d94212 100644 --- a/beeline/src/java/org/apache/hive/beeline/Reflector.java +++ b/beeline/src/java/org/apache/hive/beeline/Reflector.java @@ -110,19 +110,19 @@ public static Object convert(Object ob, Class toType) } else if (toType == Byte.class || toType == byte.class) { return Byte.valueOf(ob.toString()); } else if (toType == Character.class || toType == char.class) { - return new Character(ob.toString().charAt(0)); + return Character.valueOf(ob.toString().charAt(0)); } else if (toType == Short.class || toType == short.class) { - return new Short(ob.toString()); + return Short.valueOf(ob.toString()); } else if (toType == Integer.class || toType == int.class) { - return new Integer(ob.toString()); + return Integer.valueOf(ob.toString()); } else if (toType == Long.class || toType == long.class) { - return new Long(ob.toString()); + return Long.valueOf(ob.toString()); } else if (toType == Double.class || toType == double.class) { - return new Double(ob.toString()); + return Double.valueOf(ob.toString()); } else if (toType == Float.class || toType == float.class) { - return new Float(ob.toString()); + return Float.valueOf(ob.toString()); } else if (toType == Boolean.class || toType == boolean.class) { - return new Boolean(ob.toString().equals("true") + return Boolean.valueOf(ob.toString().equals("true") || ob.toString().equals(true + "") || ob.toString().equals("1") || ob.toString().equals("on") diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java index 0b16b83673..110769a1ba 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java @@ -288,12 +288,12 @@ private static Object serializePrimitiveField(Object field, conf.getBoolean( HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) { - return new Integer((Short) f); + return Integer.valueOf((Short) f); } else if (f instanceof Byte && conf.getBoolean( HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) { - return new Integer((Byte) f); + return Integer.valueOf((Byte) f); } } diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestLazyHCatRecord.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestLazyHCatRecord.java index b65eeb0dae..6a5a962f40 100644 --- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestLazyHCatRecord.java +++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestLazyHCatRecord.java @@ -43,7 +43,7 @@ public void testGet() throws Exception { Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue()); Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0); - Assert.assertEquals(STRING_CONST, (String) r.get(3)); + Assert.assertEquals(STRING_CONST, r.get(3)); } @Test @@ -55,7 +55,7 @@ public void testGetWithName() throws Exception { Assert.assertEquals(INT_CONST, ((Integer) r.get("an_int", schema)).intValue()); 
Assert.assertEquals(LONG_CONST, ((Long) r.get("a_long", schema)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) r.get("a_double", schema)).doubleValue(), 0); - Assert.assertEquals(STRING_CONST, (String) r.get("a_string", schema)); + Assert.assertEquals(STRING_CONST, r.get("a_string", schema)); } @Test @@ -65,7 +65,7 @@ public void testGetAll() throws Exception { Assert.assertEquals(INT_CONST, ((Integer) list.get(0)).intValue()); Assert.assertEquals(LONG_CONST, ((Long) list.get(1)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) list.get(2)).doubleValue(), 0); - Assert.assertEquals(STRING_CONST, (String) list.get(3)); + Assert.assertEquals(STRING_CONST, list.get(3)); } @Test @@ -152,16 +152,16 @@ public void testGetWritable() throws Exception { Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue()); Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0); - Assert.assertEquals(STRING_CONST, (String) r.get(3)); + Assert.assertEquals(STRING_CONST, r.get(3)); Assert.assertEquals("org.apache.hive.hcatalog.data.DefaultHCatRecord", r.getClass().getName()); } private HCatRecord getHCatRecord() throws Exception { List rec_1 = new ArrayList(4); - rec_1.add( new Integer(INT_CONST)); - rec_1.add( new Long(LONG_CONST)); - rec_1.add( new Double(DOUBLE_CONST)); - rec_1.add( new String(STRING_CONST)); + rec_1.add(Integer.valueOf(INT_CONST)); + rec_1.add(Long.valueOf(LONG_CONST)); + rec_1.add(Double.valueOf(DOUBLE_CONST)); + rec_1.add(STRING_CONST); return new DefaultHCatRecord(rec_1); } diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java index 38e1e7e1ff..281754be0d 100644 --- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java +++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java @@ -141,11 +141,11 @@ public void testSmallTinyInt() throws Exception { // Ensure Pig can read data correctly. Iterator it = server.openIterator("data"); Tuple t = it.next(); - Assert.assertEquals(new Integer(Short.MIN_VALUE), t.get(0)); - Assert.assertEquals(new Integer(Byte.MIN_VALUE), t.get(1)); + Assert.assertEquals(Integer.valueOf(Short.MIN_VALUE), t.get(0)); + Assert.assertEquals(Integer.valueOf(Byte.MIN_VALUE), t.get(1)); t = it.next(); - Assert.assertEquals(new Integer(Short.MAX_VALUE), t.get(0)); - Assert.assertEquals(new Integer(Byte.MAX_VALUE), t.get(1)); + Assert.assertEquals(Integer.valueOf(Short.MAX_VALUE), t.get(0)); + Assert.assertEquals(Integer.valueOf(Byte.MAX_VALUE), t.get(1)); Assert.assertFalse(it.hasNext()); // Ensure Pig can write correctly to smallint/tinyint columns. 
This means values within the diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java index 24081a925d..81eb5721b2 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java @@ -36,20 +36,32 @@ public FunctionMisc(Exec e) { */ @Override public void register(Function f) { - f.map.put("COALESCE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }}); - f.map.put("DECODE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { decode(ctx); }}); - f.map.put("NVL", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }}); - f.map.put("NVL2", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl2(ctx); }}); - f.map.put("PART_COUNT_BY", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { partCountBy(ctx); }}); + f.map.put("COALESCE", new FuncCommand() { @Override + public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }}); + f.map.put("DECODE", new FuncCommand() { @Override + public void run(HplsqlParser.Expr_func_paramsContext ctx) { decode(ctx); }}); + f.map.put("NVL", new FuncCommand() { @Override + public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }}); + f.map.put("NVL2", new FuncCommand() { @Override + public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl2(ctx); }}); + f.map.put("PART_COUNT_BY", new FuncCommand() { @Override + public void run(HplsqlParser.Expr_func_paramsContext ctx) { partCountBy(ctx); }}); - f.specMap.put("ACTIVITY_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { activityCount(ctx); }}); - f.specMap.put("CAST", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { cast(ctx); }}); - f.specMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { current(ctx); }}); - f.specMap.put("CURRENT_USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }}); - f.specMap.put("PART_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { partCount(ctx); }}); - f.specMap.put("USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }}); + f.specMap.put("ACTIVITY_COUNT", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { activityCount(ctx); }}); + f.specMap.put("CAST", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { cast(ctx); }}); + f.specMap.put("CURRENT", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { current(ctx); }}); + f.specMap.put("CURRENT_USER", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }}); + f.specMap.put("PART_COUNT", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { partCount(ctx); }}); + f.specMap.put("USER", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }}); - f.specSqlMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentSql(ctx); }}); + 
f.specSqlMap.put("CURRENT", new FuncSpecCommand() { @Override + public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentSql(ctx); }}); } /** @@ -288,7 +300,7 @@ public void partCountBy(HplsqlParser.Expr_func_paramsContext ctx) { } Integer count = group.get(key); if (count == null) { - count = new Integer(0); + count = Integer.valueOf(0); } group.put(key, count + 1); } diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java index 652a29f1d5..60eaab6d9d 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java @@ -225,22 +225,22 @@ public void testMetadata() throws IOException { @Test public void testSimpleTypes() { JavaDataModel memModel = JavaDataModel.get(); - int intSize = runEstimate(new Integer(0), memModel, null); - runEstimate(new String(""), memModel, "empty string"); - runEstimate(new String("foobarzzzzzzzzzzzzzz"), memModel, null); + int intSize = runEstimate(Integer.valueOf(0), memModel, null); + runEstimate("", memModel, "empty string"); + runEstimate("foobarzzzzzzzzzzzzzz", memModel, null); List list = new ArrayList(0); runEstimate(list, memModel, "empty ArrayList"); - list.add(new String("zzz")); + list.add("zzz"); runEstimate(list, memModel, "ArrayList - one string"); - list.add(new Integer(5)); - list.add(new Integer(6)); + list.add(Integer.valueOf(5)); + list.add(Integer.valueOf(6)); int arrayListSize = runEstimate(list, memModel, "ArrayList - 3 elements"); LinkedHashSet list2 = new LinkedHashSet(0); runEstimate(list2, memModel, "empty LinkedHashSet"); - list2.add(new String("zzzz")); + list2.add("zzzz"); runEstimate(list2, memModel, "LinkedHashSet - one string"); - list2.add(new Integer(7)); - list2.add(new Integer(4)); + list2.add(Integer.valueOf(7)); + list2.add(Integer.valueOf(4)); int lhsSize = runEstimate(list2, memModel, "LinkedHashSet - 3 elements"); Struct struct = new Struct(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index 4882e61e12..e2feb3f988 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -466,10 +466,10 @@ private int getSize(int pos, PrimitiveCategory category) { case DOUBLE: return javaSizePrimitiveType; case STRING: - keyPositionsSize.add(new Integer(pos)); + keyPositionsSize.add(Integer.valueOf(pos)); return javaObjectOverHead; case BINARY: - keyPositionsSize.add(new Integer(pos)); + keyPositionsSize.add(Integer.valueOf(pos)); return javaObjectOverHead; case TIMESTAMP: case TIMESTAMPLOCALTZ: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 380b60325f..b1970412f2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -1021,10 +1021,11 @@ public String dump(int level) { } public String dump(int level, HashSet seenOpts) { - if (seenOpts.contains(new Integer(id))) { + Integer idObj = Integer.valueOf(id); + if (seenOpts.contains(idObj)) { return null; } - seenOpts.add(new Integer(id)); + seenOpts.add(idObj); StringBuilder s = new StringBuilder(); String ls = getLevelString(level); diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java index 40cc5761a9..8e6fdc0707 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java @@ -129,7 +129,7 @@ protected Integer initialValue() { public static int getAndIncrementId() { int curValue = tid.get().intValue(); - tid.set(new Integer(curValue + 1)); + tid.set(Integer.valueOf(curValue + 1)); return curValue; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index cb8d81ae15..9ec820d500 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -631,7 +631,7 @@ private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException { for (TaskCompletionEvent taskCompletion : taskCompletions) { if (!taskCompletion.isMapTask()) { - reducersRunTimes.add(new Integer(taskCompletion.getTaskRunTime())); + reducersRunTimes.add(Integer.valueOf(taskCompletion.getTaskRunTime())); } } // Compute the reducers run time statistics for the job diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java index bd1f54ddf7..dfabfb81e5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java @@ -147,7 +147,7 @@ public void onVertexStarted(Map> completions) { List scheduledTasks = new ArrayList(numTasks); for (int i = 0; i < numTasks; ++i) { - scheduledTasks.add(new VertexManagerPluginContext.TaskWithLocationHint(new Integer(i), null)); + scheduledTasks.add(new VertexManagerPluginContext.TaskWithLocationHint(Integer.valueOf(i), null)); } context.scheduleVertexTasks(scheduledTasks); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java index 141cbc9dae..64f6c27846 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java @@ -738,7 +738,7 @@ public void close() throws LockException { private int getSequenceNumber(String resPath, String path) { String tst = resPath.substring(path.length()); try { - return (new Integer(tst)).intValue(); + return Integer.parseInt(tst); } catch (Exception e) { return -1; // invalid number } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java index 947c18098a..3a5b334716 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java @@ -316,7 +316,7 @@ protected boolean checkConvertBucketMapJoin( } List fileNames = getBucketFilePathsOfPartition(tbl.getDataLocation(), pGraphContext); - Integer num = new Integer(tbl.getNumBuckets()); + int num = tbl.getNumBuckets(); // The number of files for the table should be same as number of buckets. 
if (fileNames.size() != 0 && fileNames.size() != num) { @@ -332,7 +332,7 @@ protected boolean checkConvertBucketMapJoin( bigTblPartsToBucketNumber.put(null, tbl.getNumBuckets()); bigTablePartitioned = false; } else { - tblAliasToNumberOfBucketsInEachPartition.put(alias, Arrays.asList(num)); + tblAliasToNumberOfBucketsInEachPartition.put(alias, Arrays.asList(Integer.valueOf(num))); tblAliasToBucketedFilePathsInEachPartition.put(alias, Arrays.asList(fileNames)); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index 2131bf131d..bb575d37d8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -420,7 +420,7 @@ static void splitPlan(ReduceSinkOperator cRS, GenMRProcContext opProcCtx) childPlan.setReduceWork(rWork); rWork.setReducer(reducer); ReduceSinkDesc desc = cRS.getConf(); - childPlan.getReduceWork().setNumReduceTasks(new Integer(desc.getNumReducers())); + childPlan.getReduceWork().setNumReduceTasks(Integer.valueOf(desc.getNumReducers())); opProcCtx.getOpTaskMap().put(reducer, childTask); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java index 141ebe542e..6163bba3f3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java @@ -323,7 +323,7 @@ private void convertOrderLimitToASTNode(HiveSortLimit order) { RexNode fetchExpr = hiveSortLimit.getFetchExpr(); if (fetchExpr != null) { Object offset = (offsetExpr == null) ? 
- new Integer(0) : ((RexLiteral) offsetExpr).getValue2(); + Integer.valueOf(0) : ((RexLiteral) offsetExpr).getValue2(); Object fetch = ((RexLiteral) fetchExpr).getValue2(); hiveAST.limit = ASTBuilder.limit(offset, fetch); } @@ -765,7 +765,7 @@ public QueryBlockInfo(Schema schema, ASTNode ast) { } Schema(DruidQuery dq) { - HiveTableScan hts = (HiveTableScan) ((DruidQuery)dq).getTableScan(); + HiveTableScan hts = (HiveTableScan) dq.getTableScan(); String tabName = hts.getTableAlias(); for (RelDataTypeField field : dq.getRowType().getFieldList()) { add(new ColumnInfo(tabName, field.getName())); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index d8b57831f3..6aeb2a856f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -610,7 +610,7 @@ public RangeResult intersect(ExprNodeDesc exprNode) { return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); } case serdeConstants.SMALLINT_TYPE_NAME: { - short value = new Short(boundValue); + short value = Short.parseShort(boundValue); short maxValue = range.maxValue.shortValue(); short minValue = range.minValue.shortValue(); return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); @@ -623,25 +623,25 @@ public RangeResult intersect(ExprNodeDesc exprNode) { return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); } case serdeConstants.INT_TYPE_NAME: { - int value = new Integer(boundValue); + int value = Integer.parseInt(boundValue); int maxValue = range.maxValue.intValue(); int minValue = range.minValue.intValue(); return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); } case serdeConstants.BIGINT_TYPE_NAME: { - long value = new Long(boundValue); + long value = Long.parseLong(boundValue); long maxValue = range.maxValue.longValue(); long minValue = range.minValue.longValue(); return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); } case serdeConstants.FLOAT_TYPE_NAME: { - float value = new Float(boundValue); + float value = Float.parseFloat(boundValue); float maxValue = range.maxValue.floatValue(); float minValue = range.minValue.floatValue(); return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); } case serdeConstants.DOUBLE_TYPE_NAME: { - double value = new Double(boundValue); + double value = Double.parseDouble(boundValue); double maxValue = range.maxValue.doubleValue(); double minValue = range.minValue.doubleValue(); return RangeResult.of(value < minValue, value < maxValue, value == minValue, value == maxValue); @@ -971,7 +971,7 @@ private long evaluateComparator(Statistics stats, AnnotateStatsProcCtx aspCtx, E } } } else if (colTypeLowerCase.equals(serdeConstants.SMALLINT_TYPE_NAME)) { - short value = new Short(boundValue); + short value = Short.parseShort(boundValue); short maxValue = cs.getRange().maxValue.shortValue(); short minValue = cs.getRange().minValue.shortValue(); if (upperBound) { @@ -996,7 +996,7 @@ private long evaluateComparator(Statistics stats, AnnotateStatsProcCtx aspCtx, E DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(boundValue)); value = writableVal.getDays(); } else { - 
value = new Integer(boundValue); + value = Integer.parseInt(boundValue); } // Date is an integer internally int maxValue = cs.getRange().maxValue.intValue(); @@ -1017,7 +1017,7 @@ private long evaluateComparator(Statistics stats, AnnotateStatsProcCtx aspCtx, E } } } else if (colTypeLowerCase.equals(serdeConstants.BIGINT_TYPE_NAME)) { - long value = new Long(boundValue); + long value = Long.parseLong(boundValue); long maxValue = cs.getRange().maxValue.longValue(); long minValue = cs.getRange().minValue.longValue(); if (upperBound) { @@ -1036,7 +1036,7 @@ private long evaluateComparator(Statistics stats, AnnotateStatsProcCtx aspCtx, E } } } else if (colTypeLowerCase.equals(serdeConstants.FLOAT_TYPE_NAME)) { - float value = new Float(boundValue); + float value = Float.parseFloat(boundValue); float maxValue = cs.getRange().maxValue.floatValue(); float minValue = cs.getRange().minValue.floatValue(); if (upperBound) { @@ -1055,7 +1055,7 @@ private long evaluateComparator(Statistics stats, AnnotateStatsProcCtx aspCtx, E } } } else if (colTypeLowerCase.equals(serdeConstants.DOUBLE_TYPE_NAME)) { - double value = new Double(boundValue); + double value = Double.parseDouble(boundValue); double maxValue = cs.getRange().maxValue.doubleValue(); double minValue = cs.getRange().minValue.doubleValue(); if (upperBound) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 47d6e7c146..123545c89f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -3125,10 +3125,6 @@ private void subqueryRestrictionCheck(QB qb, ASTNode searchCond, RelNode srcRel, ASTNode subQueryAST = subQueries.get(i); //SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST); - Boolean orInSubquery = new Boolean(false); - Integer subqueryCount = new Integer(0); - ObjectPair subqInfo = new ObjectPair(false, 0); - ASTNode outerQueryExpr = (ASTNode) subQueryAST.getChild(2); if (outerQueryExpr != null && outerQueryExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) { @@ -4178,7 +4174,7 @@ private RexWindowBound getBound(BoundarySpec bs, RexNodeConverter converter) { SqlCall sc = null; if (amt != null) - amtLiteral = cluster.getRexBuilder().makeLiteral(new Integer(bs.getAmt()), + amtLiteral = cluster.getRexBuilder().makeLiteral(Integer.valueOf(bs.getAmt()), cluster.getTypeFactory().createSqlType(SqlTypeName.INTEGER), true); switch (bs.getDirection()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 0d0196ee1a..1353d5a0da 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -1761,11 +1761,9 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1, PlannerContext plan case HiveParser.TOK_LIMIT: if (ast.getChildCount() == 2) { qbp.setDestLimit(ctx_1.dest, - new Integer(ast.getChild(0).getText()), - new Integer(ast.getChild(1).getText())); + Integer.valueOf(ast.getChild(0).getText()), Integer.valueOf(ast.getChild(1).getText())); } else { - qbp.setDestLimit(ctx_1.dest, new Integer(0), - new Integer(ast.getChild(0).getText())); + qbp.setDestLimit(ctx_1.dest, Integer.valueOf(0), Integer.valueOf(ast.getChild(0).getText())); } break; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java index 698af94092..27287019f3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java @@ -52,7 +52,7 @@ public LimitDesc(final int offset, final int limit) { */ @Explain(displayName = "Offset of rows", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public Integer getOffset() { - return (offset == 0) ? null : new Integer(offset); + return (offset == 0) ? null : Integer.valueOf(offset); } public void setOffset(Integer offset) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java index d2a864a9be..98af7bec20 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java @@ -125,7 +125,7 @@ private void createEncryptionKey(String[] params) throws Exception { String bitLength = args.getOptionValue("bitLength", Integer.toString(DEFAULT_BIT_LENGTH)); try { - encryptionShim.createKey(keyName, new Integer(bitLength)); + encryptionShim.createKey(keyName, Integer.parseInt(bitLength)); } catch (Exception e) { throw new Exception("Cannot create encryption key: " + e.getMessage()); } @@ -144,10 +144,6 @@ private void createEncryptionZone(String[] params) throws Exception { String keyName = args.getOptionValue("keyName"); Path cryptoZone = new Path(args.getOptionValue("path")); - if (cryptoZone == null) { - throw new Exception("Cannot create encryption zone: Invalid path '" - + args.getOptionValue("path") + "'"); - } try { encryptionShim.createEncryptionZone(cryptoZone, keyName); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java index fde76475e7..9beec239ae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFPosExplode.java @@ -79,7 +79,7 @@ public void process(Object[] o) throws HiveException { for (int i = 0; i < list.size(); i++) { Object r = list.get(i); - forwardObj[0] = new Integer(i); + forwardObj[0] = Integer.valueOf(i); forwardObj[1] = r; forward(forwardObj); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java index 438107fd0c..1fa268d7af 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java @@ -440,7 +440,7 @@ public void testFilterStringColCompareStringColumnExpressions() throws HiveExcep @Test public void testFloatInExpressions() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Float.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPPlus udf = new GenericUDFOPPlus(); @@ -462,7 +462,7 @@ public void testFloatInExpressions() throws HiveException { @Test public void testVectorizeFilterAndOrExpression() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + 
ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -523,7 +523,7 @@ public void testVectorizeFilterAndOrExpression() throws HiveException { @Test public void testVectorizeFilterMultiAndOrExpression() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -547,7 +547,7 @@ public void testVectorizeFilterMultiAndOrExpression() throws HiveException { lessExprDesc.setChildren(children2); ExprNodeColumnDesc col3Expr = new ExprNodeColumnDesc(Integer.class, "col3", "table", false); - ExprNodeConstantDesc const3Desc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc const3Desc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf3 = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc3 = new ExprNodeGenericFuncDesc(); @@ -601,7 +601,7 @@ public void testVectorizeFilterMultiAndOrExpression() throws HiveException { @Test public void testVectorizeAndOrProjectionExpression() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -665,7 +665,7 @@ public void testVectorizeAndOrProjectionExpression() throws HiveException { @Test public void testNotExpression() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -703,7 +703,7 @@ public void testNotExpression() throws HiveException { @Test public void testNullExpressions() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -744,7 +744,7 @@ public void testNullExpressions() throws HiveException { @Test public void testNotNullExpressions() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); @@ -802,7 +802,7 @@ public void testVectorizeScalarColumnExpression() throws 
HiveException { @Test public void testFilterWithNegativeScalar() throws HiveException { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(-10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(-10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(); @@ -1286,8 +1286,8 @@ public void testIfConditionalExprs() throws HiveException { ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Long.class, "col2", "table", false); ExprNodeColumnDesc col3Expr = new ExprNodeColumnDesc(Long.class, "col3", "table", false); - ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc(new Integer(1)); - ExprNodeConstantDesc constDesc3 = new ExprNodeConstantDesc(new Integer(2)); + ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc(Integer.valueOf(1)); + ExprNodeConstantDesc constDesc3 = new ExprNodeConstantDesc(Integer.valueOf(2)); // long column/column IF GenericUDFIf udf = new GenericUDFIf(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java index a2febe4919..ecd381d514 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java @@ -663,7 +663,7 @@ private void chooseSchema(SupportedTypes supportedTypes, Set allowedType typeNum = r.nextInt(maxTypeNum); - Integer typeNumInteger = new Integer(typeNum); + Integer typeNumInteger = Integer.valueOf(typeNum); if (!hashSet.contains(typeNumInteger)) { hashSet.add(typeNumInteger); break; @@ -943,7 +943,7 @@ public static Object randomStringFamily(Random random, TypeInfo typeInfo, } longWritable.set( (Long) VectorRandomRowSource.randomPrimitiveObject( - r, (PrimitiveTypeInfo) TypeInfoFactory.longTypeInfo)); + r, TypeInfoFactory.longTypeInfo)); } } object = longWritable; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java index 04695d3e88..7b4cf4d6d0 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java @@ -346,8 +346,7 @@ public void testReadCorruptFile() throws IOException, SerDeException { Random rand = new Random(); for (int recIdx = 0; recIdx < recCount; recIdx++) { for (int i = 0; i < record.length; i++) { - record[i] = new Integer(rand.nextInt()).toString() - .getBytes(StandardCharsets.UTF_8); + record[i] = String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8); } for (int i = 0; i < record.length; i++) { BytesRefWritable cu = new BytesRefWritable(record[i], 0, diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java index 78cc4323fb..ee45600793 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestNewInputOutputFormat.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.ql.io.orc; -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertTrue; -import static junit.framework.Assert.assertFalse; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static 
org.junit.Assert.assertTrue; import java.io.BufferedReader; import java.io.File; @@ -383,7 +383,7 @@ public void testNewOutputFormatComplex() throws Exception { assertEquals(6, ((List)list.get(0)).get(1)); Map map = (Map)converted.get(3); assertEquals(map.size(), 1); - assertEquals(map.get("saving"), new Integer(1)); + assertEquals(map.get("saving"), Integer.valueOf(1)); row = rows.next(null); converted = (List)converter.convert(row); @@ -395,7 +395,7 @@ public void testNewOutputFormatComplex() throws Exception { assertEquals(9, ((List)list.get(0)).get(1)); map = (Map)converted.get(3); assertEquals(map.size(), 11); - assertEquals(map.get("the"), new Integer(2)); + assertEquals(map.get("the"), Integer.valueOf(2)); row = rows.next(null); converted = (List)converter.convert(row); @@ -407,7 +407,7 @@ public void testNewOutputFormatComplex() throws Exception { assertEquals(4, ((List)list.get(0)).get(1)); map = (Map)converted.get(3); assertEquals(map.size(), 13); - assertEquals(map.get("were"), new Integer(3)); + assertEquals(map.get("were"), Integer.valueOf(3)); assertFalse(rows.hasNext()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java index 2a2bbe1d55..e0d292c7be 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.Map; -import junit.framework.Assert; +import org.junit.Assert; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.*; @@ -145,7 +145,7 @@ public void testAggregateOnUDF() throws HiveException, VectorizerCannotVectorize @Test public void testValidateNestedExpressions() { ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false); - ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10)); + ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(Integer.valueOf(10)); GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan(); ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java index 1453029bbe..8b45a627ad 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java @@ -42,7 +42,7 @@ public void testStringToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52")); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf("2")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ -68,7 +68,7 @@ public void testTimestampToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2( Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString()))); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf("3")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ 
-94,7 +94,7 @@ public void testDateWritablepToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20))); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ -134,7 +134,7 @@ public void testShortDataTypeAsDays() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20))); - DeferredObject valueObj2 = new DeferredJavaObject(new Short("4")); + DeferredObject valueObj2 = new DeferredJavaObject(Short.valueOf("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java index 3705d9994b..b70a1dc6c3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java @@ -42,7 +42,7 @@ public void testStringToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52")); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf(2)); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ -68,7 +68,7 @@ public void testTimestampToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2( Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString()))); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf(3)); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ -94,7 +94,7 @@ public void testDateWritablepToDate() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20))); - DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4")); + DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf(4)); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); @@ -134,7 +134,7 @@ public void testShortDataTypeAsDays() throws HiveException { udf.initialize(arguments); DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20))); - DeferredObject valueObj2 = new DeferredJavaObject(new Short("4")); + DeferredObject valueObj2 = new DeferredJavaObject(Short.valueOf("4")); DeferredObject[] args = {valueObj1, valueObj2}; DateWritableV2 output = (DateWritableV2) udf.evaluate(args); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java index c0f9726d99..01579d6cde 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java @@ -341,7 +341,7 @@ private ObjectInspector getObjectInspector(TypeInfo typeInfo) { } 
private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) { - HashSet hashSet = null; + HashSet hashSet = null; final boolean allTypes; final boolean onlyOne = (r.nextInt(100) == 7); if (onlyOne) { @@ -400,7 +400,7 @@ private void chooseSchema(SupportedTypes supportedTypes, int maxComplexDepth) { typeNum = r.nextInt(maxTypeNum); - final Integer typeNumInteger = new Integer(typeNum); + final Integer typeNumInteger = Integer.valueOf(typeNum); if (!hashSet.contains(typeNumInteger)) { hashSet.add(typeNumInteger); break; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java index 2fe3e9a8b7..aacd978e60 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java @@ -136,7 +136,8 @@ public static void main(String[] args) throws Exception { unionMap.put("key_" + i, erMap); Complex complex = new Complex(rand.nextInt(), "record_" - + (new Integer(i)).toString(), alist, slist, islist, hash, unionMap, PropValueUnion.stringValue("test" + i), PropValueUnion.unionMStringString(hash), PropValueUnion.lString(slist)); + + String.valueOf(i), alist, slist, islist, hash, unionMap, PropValueUnion.stringValue("test" + i), + PropValueUnion.unionMStringString(hash), PropValueUnion.lString(slist)); Writable value = serializer.serialize(complex); writer.append(key, value); diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index b43fb5e6f0..023130ac47 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -4971,6 +4971,7 @@ public void rename_partition(final String db_name, final String tbl_name, null, null); } + @Override public RenamePartitionResponse rename_partition_req( RenamePartitionRequest req) throws InvalidOperationException ,MetaException ,TException { rename_partition(req.getCatName(), req.getDbName(), req.getTableName(), req.getPartVals(), @@ -9376,7 +9377,7 @@ public void parse(String[] args) { "This usage has been deprecated, consider using the new command " + "line syntax (run with -h to see usage information)"); - this.port = new Integer(args[0]); + this.port = Integer.parseInt(args[0]); } // notice that command line options take precedence over the -- 2.18.0
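
For reference, a minimal standalone sketch of the substitution this patch applies throughout: the deprecated primitive wrapper constructors (new Integer(...), new Short(...), new Boolean(...), etc.) are replaced with the valueOf factories where a boxed value is needed, and with parseInt/parseShort/parseDouble where a primitive is the target. The class name BoxingExample below is illustrative only and is not part of the patch.

    public class BoxingExample {
      public static void main(String[] args) {
        // Deprecated since Java 9: always allocates a new wrapper object.
        // Integer legacy = new Integer(42);

        // Preferred: valueOf may reuse a cached instance (Integer caches -128..127).
        Integer boxed = Integer.valueOf(42);

        // When a primitive is the target, parseInt avoids boxing entirely, as in
        // ZooKeeperHiveLockManager.getSequenceNumber and CryptoProcessor above.
        int primitive = Integer.parseInt("42");

        System.out.println(boxed == Integer.valueOf(42));   // true: same cached instance
        System.out.println(boxed.intValue() == primitive);  // true: value comparison
      }
    }

The same reasoning applies to the String cases in the patch: new String("...") copies an immutable value for no benefit, so the literal or existing reference is used directly.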