From 39c3535258a20e67d81e5bb97b8ba2f6df9c6955 Mon Sep 17 00:00:00 2001 From: Ivan Suller Date: Wed, 20 Feb 2019 14:08:48 +0100 Subject: [PATCH] HIVE-21297: --- .../java/org/apache/hive/beeline/BeeLine.java | 2 +- .../java/org/apache/hive/beeline/Rows.java | 9 +-- .../apache/hadoop/hive/ql/log/PerfLogger.java | 4 +- .../hcatalog/templeton/tool/JobState.java | 4 +- .../main/java/org/apache/hive/hplsql/Cmp.java | 3 +- .../java/org/apache/hive/hplsql/Exec.java | 16 ++--- .../java/org/apache/hive/hplsql/Stmt.java | 8 +-- .../main/java/org/apache/hive/hplsql/Udf.java | 2 +- .../main/java/org/apache/hive/hplsql/Var.java | 24 +++---- .../hive/hplsql/functions/Function.java | 4 +- .../hive/hplsql/functions/FunctionString.java | 34 ++++----- .../hadoop/hive/ql/exec/GroupByOperator.java | 3 +- .../hive/ql/exec/mr/HadoopJobExecHelper.java | 2 +- .../hadoop/hive/ql/exec/spark/MapInput.java | 2 +- .../hive/ql/exec/spark/ShuffleTran.java | 2 +- .../calcite/stats/HiveRelMdRowCount.java | 5 +- .../calcite/stats/HiveRelMdSize.java | 2 +- .../correlation/CorrelationOptimizer.java | 6 +- .../physical/AbstractJoinTaskDispatcher.java | 2 +- .../ql/optimizer/physical/LlapDecider.java | 71 +++++++++---------- .../physical/LlapPreVectorizationPass.java | 2 +- .../ql/udf/generic/GenericUDAFAverage.java | 4 +- .../hive/ql/udf/generic/GenericUDAFSum.java | 4 +- .../hive/ql/udf/generic/NGramEstimator.java | 6 +- .../exec/vector/TestVectorizationContext.java | 28 ++++---- .../expressions/TestVectorBetweenIn.java | 2 +- .../hive/ql/io/orc/TestInputOutputFormat.java | 2 +- .../TestParquetRecordReaderWrapper.java | 4 +- .../ql/io/sarg/TestSearchArgumentImpl.java | 4 +- .../udf/generic/TestGenericUDFOPDivide.java | 11 +-- .../ql/udf/generic/TestGenericUDFOPMinus.java | 10 +-- .../udf/generic/TestGenericUDFOPMultiply.java | 10 +-- .../udf/generic/TestGenericUDFOPNegative.java | 12 ++-- .../ql/udf/generic/TestGenericUDFOPPlus.java | 10 +-- .../udf/generic/TestGenericUDFOPPositive.java | 12 ++-- .../ql/udf/generic/TestGenericUDFPower.java | 18 ++--- .../hive/serde2/thrift/ColumnBuffer.java | 2 +- .../TestUnionStructObjectInspector.java | 2 +- .../hadoop/hive/common/ndv/fm/FMSketch.java | 10 ++- .../hive/metastore/metrics/PerfLogger.java | 4 +- 40 files changed, 191 insertions(+), 171 deletions(-) diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java index 65eee2c2bb..54edfeb030 100644 --- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -515,7 +515,7 @@ String loc(String res, Object[] params) { protected String locElapsedTime(long milliseconds) { if (getOpts().getShowElapsedTime()) { - return loc("time-ms", new Object[] {new Double(milliseconds / 1000d)}); + return loc("time-ms", new Object[] {Double.valueOf(milliseconds / 1000d)}); } return ""; } diff --git a/beeline/src/java/org/apache/hive/beeline/Rows.java b/beeline/src/java/org/apache/hive/beeline/Rows.java index e3b983ef41..a5f69e6cb2 100644 --- a/beeline/src/java/org/apache/hive/beeline/Rows.java +++ b/beeline/src/java/org/apache/hive/beeline/Rows.java @@ -58,6 +58,7 @@ this.convertBinaryArray = beeLine.getOpts().getConvertBinaryArrayToString(); } + @Override public void remove() { throw new UnsupportedOperationException(); } @@ -87,7 +88,7 @@ boolean isPrimaryKey(int col) { if (table == null || table.length() == 0 || column == null || column.length() == 0) { - return (primaryKeys[col] = new Boolean(false)).booleanValue(); + return 
(primaryKeys[col] = Boolean.FALSE).booleanValue(); } ResultSet pks = beeLine.getDatabaseConnection().getDatabaseMetaData().getPrimaryKeys( @@ -97,16 +98,16 @@ boolean isPrimaryKey(int col) { while (pks.next()) { if (column.equalsIgnoreCase( pks.getString("COLUMN_NAME"))) { - return (primaryKeys[col] = new Boolean(true)).booleanValue(); + return (primaryKeys[col] = Boolean.TRUE).booleanValue(); } } } finally { pks.close(); } - return (primaryKeys[col] = new Boolean(false)).booleanValue(); + return (primaryKeys[col] = Boolean.FALSE).booleanValue(); } catch (SQLException sqle) { - return (primaryKeys[col] = new Boolean(false)).booleanValue(); + return (primaryKeys[col] = Boolean.FALSE).booleanValue(); } } diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java index 0ee41c0898..a9cb009191 100644 --- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java +++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java @@ -134,7 +134,7 @@ public static void setPerfLogger(PerfLogger resetPerfLogger) { */ public void PerfLogBegin(String callerName, String method) { long startTime = System.currentTimeMillis(); - startTimes.put(method, new Long(startTime)); + startTimes.put(method, Long.valueOf(startTime)); if (LOG.isDebugEnabled()) { LOG.debug(""); } @@ -159,7 +159,7 @@ public long PerfLogEnd(String callerName, String method) { public long PerfLogEnd(String callerName, String method, String additionalInfo) { Long startTime = startTimes.get(method); long endTime = System.currentTimeMillis(); - endTimes.put(method, new Long(endTime)); + endTimes.put(method, Long.valueOf(endTime)); long duration = startTime == null ? -1 : endTime - startTime.longValue(); if (LOG.isDebugEnabled()) { diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java index 74cf1e5e83..246ea79509 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java @@ -220,7 +220,7 @@ public void setUser(String user) throws IOException { String jsonString = getField("userArgs"); - return (Map)JsonBuilder.jsonToMap(jsonString); + return JsonBuilder.jsonToMap(jsonString); } public void setUserArgs(Map userArgs) throws IOException @@ -282,7 +282,7 @@ public Long getLongField(String name) return null; else { try { - return new Long(s); + return Long.valueOf(s); } catch (NumberFormatException e) { LOG.error("templeton: bug " + name + " " + s + " : " + e); return null; diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java index 30d45871cf..3051f4b011 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java @@ -121,6 +121,7 @@ Integer run(HplsqlParser.Cmp_stmtContext ctx) { /** * Get data for comparison from the source */ + @Override public void run() { exec.executeQuery(ctx, query, conn); } @@ -208,7 +209,7 @@ else if (query2.error()) { exec.signal(e); return null; } - return new Boolean(equal); + return Boolean.valueOf(equal); } /** diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java index 9e27ba1e4d..5dfcc5bd5c 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java +++ 
b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java @@ -160,7 +160,7 @@ public Var setVariable(String name, String value) { } public Var setVariable(String name, int value) { - return setVariable(name, new Var(new Long(value))); + return setVariable(name, new Var(Long.valueOf(value))); } /** @@ -654,9 +654,9 @@ public void registerUdf() { void initOptions() { Iterator<Entry<String, String>> i = exec.conf.iterator(); while (i.hasNext()) { - Entry item = (Entry)i.next(); - String key = (String)item.getKey(); - String value = (String)item.getValue(); + Entry<String, String> item = i.next(); + String key = item.getKey(); + String value = item.getValue(); if (key == null || value == null || !key.startsWith("hplsql.")) { continue; } @@ -682,7 +682,7 @@ else if (key.startsWith("hplsql.")) { * Set SQLCODE */ public void setSqlCode(int sqlcode) { - Long code = new Long(sqlcode); + Long code = Long.valueOf(sqlcode); Var var = findVariable(SQLCODE); if (var != null) { var.setValue(code); @@ -720,7 +720,7 @@ public void setSqlState(String sqlstate) { public void setHostCode(int code) { Var var = findVariable(HOSTCODE); if (var != null) { - var.setValue(new Long(code)); + var.setValue(Long.valueOf(code)); } } @@ -2170,7 +2170,7 @@ public Integer visitSingle_quotedString(HplsqlParser.Single_quotedStringContext */ @Override public Integer visitInt_number(HplsqlParser.Int_numberContext ctx) { - exec.stack.push(new Var(new Long(ctx.getText()))); + exec.stack.push(new Var(Long.valueOf(ctx.getText()))); return 0; } @@ -2203,7 +2203,7 @@ public Integer visitBool_literal(HplsqlParser.Bool_literalContext ctx) { if (ctx.T_FALSE() != null) { val = false; } - stackPush(new Var(new Boolean(val))); + stackPush(new Var(Boolean.valueOf(val))); return 0; } diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java index eabb9fa7e0..07a9c98979 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java @@ -997,9 +997,9 @@ public Integer forRange(HplsqlParser.For_range_stmtContext ctx) { private Var setIndex(int start, int end, HplsqlParser.For_range_stmtContext ctx) { if (ctx.T_REVERSE() == null) - return new Var(ctx.L_ID().getText(), new Long(start)); + return new Var(ctx.L_ID().getText(), Long.valueOf(start)); else - return new Var(ctx.L_ID().getText(), new Long(end)); + return new Var(ctx.L_ID().getText(), Long.valueOf(end)); } /** @@ -1008,9 +1008,9 @@ private Var setIndex(int start, int end, HplsqlParser.For_range_stmtContext ctx) private void updateIndex(int step, Var index, HplsqlParser.For_range_stmtContext ctx) { if (ctx.T_REVERSE() == null) - index.increment(new Long(step)); + index.increment(step); else - index.decrement(new Long(step)); + index.decrement(step); } /** diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java b/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java index eddf6d2fba..dde86d66ab 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Udf.java @@ -103,7 +103,7 @@ void setParameters(DeferredObject[] arguments) throws HiveException { else if (argumentsOI[i] instanceof IntObjectInspector) { Integer value = (Integer)((IntObjectInspector)argumentsOI[i]).getPrimitiveJavaObject(arguments[i].get()); if (value != null) { - exec.setVariable(name, new Var(new Long(value))); + exec.setVariable(name, new Var(Long.valueOf(value))); } } else if (argumentsOI[i] instanceof LongObjectInspector) { diff --git
a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java index a117cb6d79..9033b71682 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java @@ -194,7 +194,7 @@ else if (val.type == Type.DOUBLE) { } else if (type == Type.DOUBLE) { if (val.type == Type.STRING) { - value = new Double((String)val.value); + value = Double.valueOf((String) val.value); } else if (val.type == Type.BIGINT || val.type == Type.DECIMAL) { value = Double.valueOf(val.doubleValue()); @@ -265,13 +265,13 @@ public Var setValue(ResultSet rs, ResultSetMetaData rsm, int idx) throws SQLExce } else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT || type == java.sql.Types.SMALLINT || type == java.sql.Types.TINYINT) { - cast(new Var(new Long(rs.getLong(idx)))); + cast(new Var(Long.valueOf(rs.getLong(idx)))); } else if (type == java.sql.Types.DECIMAL || type == java.sql.Types.NUMERIC) { cast(new Var(rs.getBigDecimal(idx))); } else if (type == java.sql.Types.FLOAT || type == java.sql.Types.DOUBLE) { - cast(new Var(new Double(rs.getDouble(idx)))); + cast(new Var(Double.valueOf(rs.getDouble(idx)))); } return this; } @@ -411,7 +411,7 @@ else if (var.type == Type.DECIMAL) { } } else if (type == Type.STRING && var.type == Type.STRING && - ((String)value).equals((String)var.value)) { + ((String)value).equals(var.value)) { return true; } else if (type == Type.DECIMAL && var.type == Type.DECIMAL && @@ -475,9 +475,9 @@ public BigDecimal percentDiff(Var var) { /** * Increment an integer value */ - public Var increment(Long i) { + public Var increment(long i) { if (type == Type.BIGINT) { - value = new Long(((Long)value).longValue() + i); + value = Long.valueOf(((Long) value).longValue() + i); } return this; } @@ -485,12 +485,12 @@ public Var increment(Long i) { /** * Decrement an integer value */ - public Var decrement(Long i) { - if (type == Type.BIGINT) { - value = new Long(((Long)value).longValue() - i); - } - return this; - } + public Var decrement(long i) { + if (type == Type.BIGINT) { + value = Long.valueOf(((Long) value).longValue() - i); + } + return this; + } /** * Return an integer value diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java index 61bcdeca1c..279dd2048f 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java @@ -711,7 +711,7 @@ void evalInt(Long i) { } void evalInt(int i) { - evalInt(new Long(i)); + evalInt(Long.valueOf(i)); } /** @@ -744,7 +744,7 @@ Var evalPop(ParserRuleContext ctx, int value) { if (ctx != null) { return evalPop(ctx); } - return new Var(new Long(value)); + return new Var(Long.valueOf(value)); } /** diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java index 9c1037cb43..8b937e45f8 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java @@ -30,20 +30,20 @@ public FunctionString(Exec e) { */ @Override public void register(Function f) { - f.map.put("CONCAT", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { concat(ctx); }}); - f.map.put("CHAR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { 
char_(ctx); }}); - f.map.put("INSTR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { instr(ctx); }}); - f.map.put("LEN", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { len(ctx); }}); - f.map.put("LENGTH", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { length(ctx); }}); - f.map.put("LOWER", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { lower(ctx); }}); - f.map.put("REPLACE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { replace(ctx); }}); - f.map.put("SUBSTR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { substr(ctx); }}); - f.map.put("SUBSTRING", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { substr(ctx); }}); - f.map.put("TO_CHAR", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { toChar(ctx); }}); - f.map.put("UPPER", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { upper(ctx); }}); + f.map.put("CONCAT", this::concat); + f.map.put("CHAR", this::char_); + f.map.put("INSTR", this::instr); + f.map.put("LEN", this::len); + f.map.put("LENGTH", this::length); + f.map.put("LOWER", this::lower); + f.map.put("REPLACE", this::replace); + f.map.put("SUBSTR", this::substr); + f.map.put("SUBSTRING", this::substr); + f.map.put("TO_CHAR", this::toChar); + f.map.put("UPPER", this::upper); - f.specMap.put("SUBSTRING", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { substring(ctx); }}); - f.specMap.put("TRIM", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { trim(ctx); }}); + f.specMap.put("SUBSTRING", this::substring); + f.specMap.put("TRIM", this::trim); } /** @@ -96,7 +96,7 @@ void instr(HplsqlParser.Expr_func_paramsContext ctx) { return; } else if(str.isEmpty()) { - evalInt(new Long(0)); + evalInt(0); return; } String substr = evalPop(ctx.func_param(1).expr()).toString(); @@ -139,7 +139,7 @@ else if(str.isEmpty()) { } } } - evalInt(new Long(idx)); + evalInt(idx); } /** @@ -151,7 +151,7 @@ void len(HplsqlParser.Expr_func_paramsContext ctx) { return; } int len = evalPop(ctx.func_param(0).expr()).toString().trim().length(); - evalInt(new Long(len)); + evalInt(len); } /** @@ -163,7 +163,7 @@ void length(HplsqlParser.Expr_func_paramsContext ctx) { return; } int len = evalPop(ctx.func_param(0).expr()).toString().length(); - evalInt(new Long(len)); + evalInt(len); } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index e2feb3f988..583460f2c4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -500,8 +500,7 @@ private int getSize(int pos, Class c, Field f) { || c.isInstance(Short.valueOf((short) 0)) || c.isInstance(Integer.valueOf(0)) || c.isInstance(Long.valueOf(0)) - || c.isInstance(new Float(0)) - || c.isInstance(new Double(0))) { + || c.isInstance(Float.valueOf(0)) || c.isInstance(Double.valueOf(0))) { return javaSizePrimitiveType; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index 9ec820d500..d2ca33d96c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -646,7 +646,7 @@ private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException { Map exctractedCounters = new HashMap(); for (Counters.Group cg : counters) { for (Counter c : cg) { - exctractedCounters.put(cg.getName() + "::" + c.getName(), new Double(c.getCounter())); + exctractedCounters.put(cg.getName() + "::" + c.getName(), Double.valueOf(c.getCounter())); } } return exctractedCounters; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapInput.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapInput.java index b242f57db8..55096ad113 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapInput.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/MapInput.java @@ -100,7 +100,7 @@ public String getName() { @Override public Boolean isCacheEnable() { - return new Boolean(toCache); + return Boolean.valueOf(toCache); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ShuffleTran.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ShuffleTran.java index f69807954b..26dcf7f545 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ShuffleTran.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ShuffleTran.java @@ -71,7 +71,7 @@ public String getName() { @Override public Boolean isCacheEnable() { - return new Boolean(toCache); + return Boolean.valueOf(toCache); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java index be34673b92..58a74900ef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java @@ -72,6 +72,7 @@ protected HiveRelMdRowCount() { super(); } + @Override public Double getRowCount(Join join, RelMetadataQuery mq) { // Try to infer from constraints first final Pair constraintBasedResult = @@ -135,11 +136,11 @@ public Double getRowCount(Sort rel, RelMetadataQuery mq) { if (rowCount != null && rel.fetch != null) { final int offset = rel.offset == null ? 
0 : RexLiteral.intValue(rel.offset); final int limit = RexLiteral.intValue(rel.fetch); - final Double offsetLimit = new Double(offset + limit); + final int offsetLimit = offset + limit; // offsetLimit is smaller than rowCount of the input operator // thus, we return the offsetLimit if (offsetLimit < rowCount) { - return offsetLimit; + return Double.valueOf(offsetLimit); } } return rowCount; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdSize.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdSize.java index 97097381d9..c1cd34478d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdSize.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdSize.java @@ -71,7 +71,7 @@ private HiveRelMdSize() {} list.add(columnStatistic.getAvgColLen()); } } else { - list.add(new Double(0)); + list.add(Double.valueOf(0)); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java index 7ff92edd91..40cfcf5a8f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java @@ -154,7 +154,7 @@ private void findPossibleAutoConvertedJoinOperators() throws SemanticException { aliasTotalKnownInputSize += size; Long es = aliasToSize.get(alias); if(es == null) { - es = new Long(0); + es = Long.valueOf(0); } es += size; aliasToSize.put(alias, es); @@ -207,6 +207,7 @@ private void findPossibleAutoConvertedJoinOperators() throws SemanticException { * current parse context * @throws SemanticException */ + @Override public ParseContext transform(ParseContext pctx) throws SemanticException { pCtx = pctx; @@ -253,7 +254,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException { private void analyzeReduceSinkOperatorsOfJoinOperator(JoinCondDesc[] joinConds, List<Operator<? extends OperatorDesc>> rsOps, Operator<? extends OperatorDesc> curentRsOp, Set<ReduceSinkOperator> correlatedRsOps) { - if (correlatedRsOps.contains((ReduceSinkOperator) curentRsOp)) { + if (correlatedRsOps.contains(curentRsOp)) { return; } correlatedRsOps.add((ReduceSinkOperator) curentRsOp); @@ -569,6 +570,7 @@ private boolean sameOrder(String order1, String order2) { return reduceSinkOperators; } + @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object...
nodeOutputs) throws SemanticException { CorrelationNodeProcCtx corrCtx = (CorrelationNodeProcCtx) ctx; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java index ed5ca5e642..0b5de81158 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java @@ -140,7 +140,7 @@ public long getTotalKnownInputSize(Context context, MapWork currWork, aliasTotalKnownInputSize += size; Long es = aliasToSize.get(alias); if (es == null) { - es = new Long(0); + es = Long.valueOf(0); } es += size; aliasToSize.put(alias, es); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java index 0a94254be8..385c87cd87 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java @@ -382,50 +382,45 @@ private boolean checkAggregators(Collection aggs) { public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { LOG.debug("Cannot run operator [" + n + "] in llap mode."); - return new Boolean(false); + return Boolean.FALSE; } }); - opRules.put(new RuleRegExp("No user code in fil", FilterOperator.getOperatorName() + "%"), - new NodeProcessor() { - @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, - Object... os) { - ExprNodeDesc expr = ((FilterOperator)n).getConf().getPredicate(); - Boolean retval = new Boolean(checkExpression(expr)); - if (!retval) { - LOG.info("Cannot run filter operator [" + n + "] in llap mode"); - } - return new Boolean(retval); + opRules.put(new RuleRegExp("No user code in fil", FilterOperator.getOperatorName() + "%"), new NodeProcessor() { + @Override + public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { + ExprNodeDesc expr = ((FilterOperator) n).getConf().getPredicate(); + boolean retval = checkExpression(expr); + if (!retval) { + LOG.info("Cannot run filter operator [" + n + "] in llap mode"); } - }); - opRules.put(new RuleRegExp("No user code in gby", GroupByOperator.getOperatorName() + "%"), - new NodeProcessor() { - @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, - Object... os) { - @SuppressWarnings("unchecked") - List aggs = ((Operator) n).getConf().getAggregators(); - Boolean retval = new Boolean(checkAggregators(aggs)); - if (!retval) { - LOG.info("Cannot run group by operator [" + n + "] in llap mode"); - } - return new Boolean(retval); + return Boolean.valueOf(retval); + } + }); + opRules.put(new RuleRegExp("No user code in gby", GroupByOperator.getOperatorName() + "%"), new NodeProcessor() { + @Override + public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { + @SuppressWarnings("unchecked") + List aggs = ((Operator) n).getConf().getAggregators(); + boolean retval = checkAggregators(aggs); + if (!retval) { + LOG.info("Cannot run group by operator [" + n + "] in llap mode"); } - }); + return Boolean.valueOf(retval); + } + }); opRules.put(new RuleRegExp("No user code in select", SelectOperator.getOperatorName() + "%"), new NodeProcessor() { - @Override - public Object process(Node n, Stack s, NodeProcessorCtx c, - Object... 
os) { - @SuppressWarnings({ "unchecked" }) - List exprs = ((Operator) n).getConf().getColList(); - Boolean retval = new Boolean(checkExpressions(exprs)); - if (!retval) { - LOG.info("Cannot run select operator [" + n + "] in llap mode"); + @Override + public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { + @SuppressWarnings({"unchecked"}) + List exprs = ((Operator) n).getConf().getColList(); + boolean retval = checkExpressions(exprs); + if (!retval) { + LOG.info("Cannot run select operator [" + n + "] in llap mode"); + } + return Boolean.valueOf(retval); } - return new Boolean(retval); - } - }); + }); if (!conf.getBoolVar(HiveConf.ConfVars.LLAP_ENABLE_GRACE_JOIN_IN_LLAP)) { opRules.put( @@ -438,7 +433,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { && !(mapJoinOp.getConf().isDynamicPartitionHashJoin())) { mapJoinOpList.add((MapJoinOperator) n); } - return new Boolean(true); + return Boolean.TRUE; } }); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java index 73a01d131a..8a7a5418b7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java @@ -111,7 +111,7 @@ public Object process(Node n, Stack s, NodeProcessorCtx c, Object... os) { && !(mapJoinOp.getConf().isDynamicPartitionHashJoin())) { mapJoinOp.getConf().setHybridHashJoin(false); } - return new Boolean(true); + return Boolean.TRUE; } }); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 807eca92f1..2962fa5cff 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -130,7 +130,7 @@ public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo paramInfo) @Override public void doReset(AverageAggregationBuffer aggregation) throws HiveException { aggregation.count = 0; - aggregation.sum = new Double(0); + aggregation.sum = Double.valueOf(0); aggregation.uniqueObjects = new HashSet(); } @@ -225,7 +225,7 @@ protected DoubleWritable getNextResult( throws HiveException { AverageAggregationBuffer myagg = (AverageAggregationBuffer) ss.wrappedBuf; return myagg.count == 0 ? null : new Object[] { - new Double(myagg.sum), myagg.count }; + myagg.sum, myagg.count}; } }; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java index 79bf2be4ec..6ce8734e8f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java @@ -492,7 +492,7 @@ protected Double getCurrentIntermediateResult( org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStreamingEvaluator.SumAvgEnhancer.SumAvgStreamingState ss) throws HiveException { SumDoubleAgg myagg = (SumDoubleAgg) ss.wrappedBuf; - return myagg.empty ? null : new Double(myagg.sum); + return myagg.empty ? 
null : myagg.sum; } }; @@ -621,7 +621,7 @@ protected Long getCurrentIntermediateResult( org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStreamingEvaluator.SumAvgEnhancer.SumAvgStreamingState ss) throws HiveException { SumLongAgg myagg = (SumLongAgg) ss.wrappedBuf; - return myagg.empty ? null : new Long(myagg.sum); + return myagg.empty ? null : myagg.sum; } }; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java index 659512525d..30f06e1bbe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java @@ -102,6 +102,7 @@ public void reset() { ArrayList result = new ArrayList(); ArrayList<Map.Entry<ArrayList<String>, Double>> list = new ArrayList(ngrams.entrySet()); Collections.sort(list, new Comparator<Map.Entry<ArrayList<String>, Double>>() { + @Override public int compare(Map.Entry<ArrayList<String>, Double> o1, Map.Entry<ArrayList<String>, Double> o2) { int result = o2.getValue().compareTo(o1.getValue()); @@ -155,7 +156,7 @@ public void add(ArrayList<String> ng) throws HiveException { Double curFreq = ngrams.get(ng); if(curFreq == null) { // new n-gram - curFreq = new Double(1.0); + curFreq = Double.valueOf(1.0); } else { // existing n-gram, just increment count curFreq++; @@ -192,6 +193,7 @@ public void add(ArrayList<String> ng) throws HiveException { private void trim(boolean finalTrim) throws HiveException { ArrayList<Map.Entry<ArrayList<String>,Double>> list = new ArrayList(ngrams.entrySet()); Collections.sort(list, new Comparator<Map.Entry<ArrayList<String>,Double>>() { + @Override public int compare(Map.Entry<ArrayList<String>,Double> o1, Map.Entry<ArrayList<String>,Double> o2) { return o1.getValue().compareTo(o2.getValue()); @@ -246,7 +248,7 @@ public void merge(List other) throws HiveException { double val = Double.parseDouble( other.get(i).toString() ); Double myval = ngrams.get(key); if(myval == null) { - myval = new Double(val); + myval = Double.valueOf(val); } else { myval += val; } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java index 1fa268d7af..d261409efd 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java @@ -474,7 +474,7 @@ public void testVectorizeFilterAndOrExpression() throws HiveException { greaterExprDesc.setChildren(children1); ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Float.class, "col2", "table", false); - ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(new Float(1.0)); + ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(Float.valueOf(1.0f)); GenericUDFOPLessThan udf2 = new GenericUDFOPLessThan(); ExprNodeGenericFuncDesc lessExprDesc = new ExprNodeGenericFuncDesc(); @@ -535,7 +535,7 @@ public void testVectorizeFilterMultiAndOrExpression() throws HiveException { greaterExprDesc.setChildren(children1); ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Float.class, "col2", "table", false); - ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(new Float(1.0)); + ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(Float.valueOf(1.0f)); GenericUDFOPLessThan udf2 = new GenericUDFOPLessThan(); ExprNodeGenericFuncDesc lessExprDesc = new ExprNodeGenericFuncDesc(); @@ -1114,7 +1114,7 @@ public void testBetweenFilters() throws HiveException { // string BETWEEN GenericUDFBetween udf = new GenericUDFBetween(); List children1 = new ArrayList(); - children1.add(new
ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword + children1.add(new ExprNodeConstantDesc(Boolean.FALSE)); // no NOT keyword children1.add(col1Expr); children1.add(constDesc); children1.add(constDesc2); @@ -1131,7 +1131,7 @@ public void testBetweenFilters() throws HiveException { assertTrue(ve instanceof FilterStringColumnBetween); // string NOT BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); // has NOT keyword ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterStringColumnNotBetween); @@ -1144,7 +1144,7 @@ public void testBetweenFilters() throws HiveException { // CHAR BETWEEN udf = new GenericUDFBetween(); children1 = new ArrayList(); - children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword + children1.add(new ExprNodeConstantDesc(Boolean.FALSE)); // no NOT keyword children1.add(col1Expr); children1.add(constDesc); children1.add(constDesc2); @@ -1156,7 +1156,7 @@ public void testBetweenFilters() throws HiveException { assertTrue(ve instanceof FilterCharColumnBetween); // CHAR NOT BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); // has NOT keyword ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterCharColumnNotBetween); @@ -1169,7 +1169,7 @@ public void testBetweenFilters() throws HiveException { // VARCHAR BETWEEN udf = new GenericUDFBetween(); children1 = new ArrayList(); - children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword + children1.add(new ExprNodeConstantDesc(Boolean.FALSE)); // no NOT keyword children1.add(col1Expr); children1.add(constDesc); children1.add(constDesc2); @@ -1181,12 +1181,12 @@ public void testBetweenFilters() throws HiveException { assertTrue(ve instanceof FilterVarCharColumnBetween); // VARCHAR NOT BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); // has NOT keyword ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterVarCharColumnNotBetween); // long BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(false))); + children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE)); children1.set(1, new ExprNodeColumnDesc(Long.class, "col1", "table", false)); children1.set(2, new ExprNodeConstantDesc(10)); children1.set(3, new ExprNodeConstantDesc(20)); @@ -1194,12 +1194,12 @@ public void testBetweenFilters() throws HiveException { assertTrue(ve instanceof FilterLongColumnBetween); // long NOT BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterLongColumnNotBetween); // double BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(false))); + children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE)); children1.set(1, new ExprNodeColumnDesc(Double.class, "col1", "table", false)); children1.set(2, new ExprNodeConstantDesc(10.0d)); children1.set(3, new ExprNodeConstantDesc(20.0d)); @@ -1207,12 +1207,12 @@ public void testBetweenFilters() throws HiveException { assertTrue(ve instanceof FilterDoubleColumnBetween); // double NOT 
BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterDoubleColumnNotBetween); // timestamp BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(false))); + children1.set(0, new ExprNodeConstantDesc(Boolean.FALSE)); children1.set(1, new ExprNodeColumnDesc(Timestamp.class, "col1", "table", false)); children1.set(2, new ExprNodeConstantDesc("2013-11-05 00:00:00.000")); children1.set(3, new ExprNodeConstantDesc("2013-11-06 00:00:00.000")); @@ -1220,7 +1220,7 @@ public void testBetweenFilters() throws HiveException { assertEquals(FilterTimestampColumnBetween.class, ve.getClass()); // timestamp NOT BETWEEN - children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); + children1.set(0, new ExprNodeConstantDesc(Boolean.TRUE)); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertEquals(FilterTimestampColumnNotBetween.class, ve.getClass()); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java index 16bb445eee..3f09cb1c39 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorBetweenIn.java @@ -464,7 +464,7 @@ private boolean doBetweenInVariation(Random random, String typeName, List children = new ArrayList(); if (isBetween) { - children.add(new ExprNodeConstantDesc(new Boolean(isInvert))); + children.add(new ExprNodeConstantDesc(Boolean.valueOf(isInvert))); } children.add(col1Expr); for (Object compareObject : compareList) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java index 5c13d45d6e..25cd65737b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java @@ -2655,7 +2655,7 @@ public void testSplitElimination() throws Exception { SearchArgument sarg = SearchArgumentFactory.newBuilder() .startAnd() - .lessThan("z", PredicateLeaf.Type.LONG, new Long(0)) + .lessThan("z", PredicateLeaf.Type.LONG, Long.valueOf(0)) .end() .build(); conf.set("sarg.pushdown", toKryo(sarg)); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRecordReaderWrapper.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRecordReaderWrapper.java index c4a4c21cfe..0210a0a372 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRecordReaderWrapper.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRecordReaderWrapper.java @@ -153,8 +153,8 @@ public void testBuilderFloat() throws Exception { .lessThan("x1", PredicateLeaf.Type.LONG, 22L) .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString()) - .equals("z", PredicateLeaf.Type.FLOAT, new Double(0.22)) - .equals("z1", PredicateLeaf.Type.FLOAT, new Double(0.22)) + .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22)) + .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22)) .end() .build(); MessageType schema = MessageTypeParser.parseMessageType("message test {" + diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java index 79a2f4120d..0f402ec29a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java @@ -412,8 +412,8 @@ public void testBuilderFloat() throws Exception { .lessThan("x1", PredicateLeaf.Type.LONG, 22L) .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString()) - .equals("z", PredicateLeaf.Type.FLOAT, new Double(0.22)) - .equals("z1", PredicateLeaf.Type.FLOAT, new Double(0.22)) + .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22)) + .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22)) .end() .build(); assertEquals("leaf-0 = (LESS_THAN x 22), " + diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java index e5098bdc66..11f0876525 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; @@ -42,6 +41,8 @@ public class TestGenericUDFOPDivide extends AbstractTestGenericUDFOPNumeric { + private static final double EPSILON = 1E-6; + @Test public void testByteDivideShort() throws HiveException { GenericUDFOPDivide udf = new GenericUDFOPDivide(); @@ -82,7 +83,7 @@ public void testVarcharDivideInt() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(123.0 / 456.0), new Double(res.get())); + Assert.assertEquals(123.0 / 456.0, res.get(), EPSILON); } @Test @@ -103,7 +104,7 @@ public void testDoubleDivideLong() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(0.45), new Double(res.get())); + Assert.assertEquals(0.45, res.get(), EPSILON); } @Test @@ -145,7 +146,7 @@ public void testFloatDivideFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(3.0), new Double(res.get())); + Assert.assertEquals(3.0, res.get(), EPSILON); } @Test @@ -166,7 +167,7 @@ public void testDouleDivideDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(74.52 / 234.97), new Double(res.get())); + Assert.assertEquals(74.52 / 234.97, res.get(), 
EPSILON); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index 504aa7a078..264daaa2fe 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -49,6 +49,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric { + private static final double EPSILON = 1E-6; + @Test public void testByteMinusShort() throws HiveException { GenericUDFOPMinus udf = new GenericUDFOPMinus(); @@ -89,7 +91,7 @@ public void testVarcharMinusInt() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-333.0), new Double(res.get())); + Assert.assertEquals(-333.0, res.get(), EPSILON); } @Test @@ -111,7 +113,7 @@ public void testDoubleMinusLong() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-5.5), new Double(res.get())); + Assert.assertEquals(-5.5, res.get(), EPSILON); } @Test @@ -153,7 +155,7 @@ public void testFloatMinusFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo); FloatWritable res = (FloatWritable) udf.evaluate(args); - Assert.assertEquals(new Float(4.5), new Float(res.get())); + Assert.assertEquals(4.5, res.get(), EPSILON); } @Test @@ -174,7 +176,7 @@ public void testDouleMinusDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-160.45), new Double(res.get())); + Assert.assertEquals(-160.45, res.get(), EPSILON); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java index b945a8529b..0f05f25485 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java @@ -41,6 +41,8 @@ public class TestGenericUDFOPMultiply extends AbstractTestGenericUDFOPNumeric { + private static final double EPSILON = 1E-6; + @Test public void testByteTimesShort() throws HiveException { GenericUDFOPMultiply udf = new GenericUDFOPMultiply(); @@ -81,7 +83,7 @@ public void testVarcharTimesInt() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(123 * 456), new Double(res.get())); + Assert.assertEquals(123 * 456, res.get(), EPSILON); } @Test @@ -102,7 +104,7 @@ public void testDoubleTimesLong() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); 
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(45.0), new Double(res.get())); + Assert.assertEquals(45.0, res.get(), EPSILON); } @Test @@ -144,7 +146,7 @@ public void testFloatTimesFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo); FloatWritable res = (FloatWritable) udf.evaluate(args); - Assert.assertEquals(new Float(0.0), new Float(res.get())); + Assert.assertEquals(0.0, res.get(), EPSILON); } @Test @@ -165,7 +167,7 @@ public void testDouleTimesDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(17509.9644), new Double(res.get())); + Assert.assertEquals(17509.9644, res.get(), EPSILON); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java index 7a285c8938..0dc4f9f86d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java @@ -46,6 +46,8 @@ public class TestGenericUDFOPNegative { + private static final double EPSILON = 1E-6; + @Test public void testByte() throws HiveException { GenericUDFOPNegative udf = new GenericUDFOPNegative(); @@ -133,7 +135,7 @@ public void testFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo()); FloatWritable res = (FloatWritable) udf.evaluate(args); - Assert.assertEquals(new Float(-323.4747f), new Float(res.get())); + Assert.assertEquals(-323.4747f, res.get(), EPSILON); } @Test @@ -151,7 +153,7 @@ public void testDouble() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-32300.004747), new Double(res.get())); + Assert.assertEquals(-32300.004747, res.get(), EPSILON); } @Test @@ -188,7 +190,7 @@ public void testString() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-32300.004747), new Double(res.get())); + Assert.assertEquals(-32300.004747, res.get(), EPSILON); } @Test @@ -208,7 +210,7 @@ public void testVarchar() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-32300.004747), new Double(res.get())); + Assert.assertEquals(-32300.004747, res.get(), EPSILON); } @Test @@ -228,7 +230,7 @@ public void testChar() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, 
oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(-32300.004747), new Double(res.get())); + Assert.assertEquals(-32300.004747, res.get(), EPSILON); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index 2169999bce..fe7dc7b0f8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -49,6 +49,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric { + private static final double EPSILON = 1E-6; + @Test public void testBytePlusShort() throws HiveException { GenericUDFOPPlus udf = new GenericUDFOPPlus(); @@ -91,7 +93,7 @@ public void testVarcharPlusInt() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(579.0), new Double(res.get())); + Assert.assertEquals(579.0, res.get(), EPSILON); } @Test @@ -113,7 +115,7 @@ public void testDoublePlusLong() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(14.5), new Double(res.get())); + Assert.assertEquals(14.5, res.get(), EPSILON); } @Test @@ -157,7 +159,7 @@ public void testFloatPlusFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo); FloatWritable res = (FloatWritable) udf.evaluate(args); - Assert.assertEquals(new Float(4.5), new Float(res.get())); + Assert.assertEquals(4.5, res.get(), EPSILON); } @Test @@ -179,7 +181,7 @@ public void testDoulePlusDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(309.49), new Double(res.get())); + Assert.assertEquals(309.49, res.get(), EPSILON); } @Test diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java index 6d464da1d4..53bdc47b19 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPositive.java @@ -46,6 +46,8 @@ public class TestGenericUDFOPPositive { + private static final double EPSILON = 1E-6; + @Test public void testByte() throws HiveException { GenericUDFOPPositive udf = new GenericUDFOPPositive(); @@ -133,7 +135,7 @@ public void testFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo()); FloatWritable res = (FloatWritable) udf.evaluate(args); - Assert.assertEquals(new Float(323.4747f), new Float(res.get())); + Assert.assertEquals(323.4747f, res.get(), EPSILON); } @Test @@ -151,7 +153,7 @@ public void testDouble() throws HiveException {
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(32300.004747), new Double(res.get())); + Assert.assertEquals(32300.004747, res.get(), EPSILON); } @Test @@ -188,7 +190,7 @@ public void testString() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(32300.004747), new Double(res.get())); + Assert.assertEquals(32300.004747, res.get(), EPSILON); } @Test @@ -208,7 +210,7 @@ public void testVarchar() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(32300.004747), new Double(res.get())); + Assert.assertEquals(32300.004747, res.get(), EPSILON); } @Test @@ -228,7 +230,7 @@ public void testChar() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(32300.004747), new Double(res.get())); + Assert.assertEquals(32300.004747, res.get(), EPSILON); } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java index 2812666cee..ba5033fd97 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPower.java @@ -39,6 +39,8 @@ public class TestGenericUDFPower { + private static final double EPSILON = 1E-10; + @Test public void testBytePowerShort() throws HiveException { GenericUDFPower udf = new GenericUDFPower(); @@ -57,7 +59,7 @@ public void testBytePowerShort() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(16), new Double(res.get())); + Assert.assertEquals(16, res.get(), EPSILON); } @Test @@ -79,7 +81,7 @@ public void testVarcharPowerInt() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(3.14 * 3.14), new Double(res.get())); + Assert.assertEquals(3.14 * 3.14, res.get(), EPSILON); } @Test @@ -100,7 +102,7 @@ public void testDoublePowerLong() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(4.5 * 4.5 * 4.5 * 4.5), new Double(res.get())); + Assert.assertEquals(4.5 * 4.5 * 4.5 * 4.5, res.get(), EPSILON); } @Test @@ -121,7 +123,7 @@ public void testLongPowerDecimal() throws HiveException { PrimitiveObjectInspector oi = 
(PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(1380.3842646028852), new Double(res.get())); + Assert.assertEquals(1380.3842646028852, res.get(), EPSILON); } @Test @@ -142,7 +144,7 @@ public void testFloatPowerFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(0.10475656017578482), new Double(res.get())); + Assert.assertEquals(0.10475656017578482, res.get(), EPSILON); } @Test @@ -163,7 +165,7 @@ public void testShortPowerFloat() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(0.009065844089438033), new Double(res.get())); + Assert.assertEquals(0.009065844089438033, res.get(), EPSILON); } @Test @@ -184,7 +186,7 @@ public void testDoulePowerDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals("Unexpected result", new Double(-4.52 * 4.52 * 4.52), new Double(res.get()), 1e-6); + Assert.assertEquals("Unexpected result", -4.52 * 4.52 * 4.52, res.get(), EPSILON); } @Test @@ -205,7 +207,7 @@ public void testDecimalPowerDecimal() throws HiveException { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo()); DoubleWritable res = (DoubleWritable) udf.evaluate(args); - Assert.assertEquals(new Double(1.9214203800477838E-4), new Double(res.get())); + Assert.assertEquals(1.9214203800477838E-4, res.get(), EPSILON); } } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java index efb4508f02..67c8f4547e 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ColumnBuffer.java @@ -381,7 +381,7 @@ public void addValue(Type type, Object field) { break; case FLOAT_TYPE: nulls.set(size, field == null); - doubleVars()[size] = field == null ? 0 : new Double(field.toString()); + doubleVars()[size] = field == null ? 
0.0 : Double.parseDouble(field.toString()); break; case DOUBLE_TYPE: nulls.set(size, field == null); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java index efc96931ca..6e0cf479be 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java @@ -100,7 +100,7 @@ public void testUnionStructObjectInspector() throws Throwable { struct1.add(true); ArrayList struct2 = new ArrayList(2); struct2.add(1.0); - struct2.add(new Long(111)); + struct2.add(Long.valueOf(111)); ArrayList struct = new ArrayList(2); struct.add(struct1); struct.add(struct2); diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/ndv/fm/FMSketch.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/ndv/fm/FMSketch.java index f6cdc4ce8e..0cf926f5c6 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/ndv/fm/FMSketch.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/ndv/fm/FMSketch.java @@ -118,6 +118,7 @@ public FMSketch(int numBitVectors) { /** * Resets a distinctValueEstimator object to its original state. */ + @Override public void reset() { for (int i=0; i< numBitVectors; i++) { bitVector[i].clear(); @@ -211,6 +212,7 @@ private int generateHashForPCSA(long v) { return hash; } + @Override public void addToEstimator(long v) { /* Update summary bitVector : * Generate hash value of the long value and mod it by 2^bitVectorSize-1. @@ -251,16 +253,18 @@ public void addToEstimatorPCSA(long v) { bitVector[hash%numBitVectors].set(index); } + @Override public void addToEstimator(double d) { - int v = new Double(d).hashCode(); + int v = Double.hashCode(d); addToEstimator(v); } public void addToEstimatorPCSA(double d) { - int v = new Double(d).hashCode(); + int v = Double.hashCode(d); addToEstimatorPCSA(v); } + @Override public void addToEstimator(HiveDecimal decimal) { int v = decimal.hashCode(); addToEstimator(v); @@ -297,6 +301,7 @@ public long estimateNumDistinctValuesPCSA() { /* We use the Flajolet-Martin estimator to estimate the number of distinct values.FM uses the * location of the least significant zero as an estimate of log2(phi*ndvs). 
*/ + @Override public long estimateNumDistinctValues() { int sumLeastSigZero = 0; double avgLeastSigZero; @@ -333,6 +338,7 @@ static int lengthFor(JavaDataModel model, Integer numVector) { return length; } + @Override public int lengthFor(JavaDataModel model) { return lengthFor(model, getNumBitVectors()); } diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/PerfLogger.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/PerfLogger.java index c111343701..536e0c57f7 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/PerfLogger.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/PerfLogger.java @@ -73,7 +73,7 @@ public static void setPerfLogger(PerfLogger resetPerfLogger) { */ public void PerfLogBegin(String callerName, String method) { long startTime = System.currentTimeMillis(); - startTimes.put(method, new Long(startTime)); + startTimes.put(method, Long.valueOf(startTime)); if (LOG.isDebugEnabled()) { LOG.debug(""); } @@ -98,7 +98,7 @@ public long PerfLogEnd(String callerName, String method) { public long PerfLogEnd(String callerName, String method, String additionalInfo) { Long startTime = startTimes.get(method); long endTime = System.currentTimeMillis(); - endTimes.put(method, new Long(endTime)); + endTimes.put(method, Long.valueOf(endTime)); long duration = startTime == null ? -1 : endTime - startTime.longValue(); if (LOG.isDebugEnabled()) { -- 2.18.0
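
Note for reviewers (illustration only, not part of the applied diff): the changes above follow two mechanical patterns. The boxed-primitive constructors (new Double(...), new Float(...), new Long(...)) are deprecated since JDK 9 in favor of the valueOf/parseXxx factories, and exact equality between boxed floating-point values is brittle, so the affected assertions now compare primitives with an explicit delta. Below is a minimal self-contained JUnit 4 sketch of both idioms; the class name, the sample values, and the 1E-6 tolerance here are hypothetical, chosen only for illustration:

import org.junit.Assert;
import org.junit.Test;

public class BoxedPrimitiveIdiomsTest {

  // Tolerance for floating-point comparison; the patch defines a per-class
  // EPSILON constant (1E-6 or 1E-10) for the same purpose.
  private static final double EPSILON = 1E-6;

  @Test
  public void testDeltaAssertion() {
    double expected = 0.1 + 0.2; // 0.30000000000000004 in IEEE 754
    double actual = 0.3;

    // Old idiom: boxes both sides and requires bit-exact equality,
    // so this variant would fail here:
    //   Assert.assertEquals(new Double(expected), new Double(actual));

    // New idiom: compares primitives within a tolerance, with no boxing.
    Assert.assertEquals(expected, actual, EPSILON);
  }

  @Test
  public void testFactoryMethods() {
    // Long.valueOf may return a cached instance; new Long(...) always
    // allocates and is deprecated.
    Long boxed = Long.valueOf(111);
    Assert.assertEquals(111L, boxed.longValue());

    // Double.parseDouble returns the primitive directly, avoiding the
    // allocate-then-unbox round trip of new Double(String).
    double parsed = Double.parseDouble("3.14");
    Assert.assertEquals(3.14, parsed, EPSILON);
  }
}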