diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 7459bba..99cdb95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -430,13 +430,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
       cntr = 1;
       logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);
 
-      String suffix = Integer.toString(conf.getDestTableId());
-      String fullName = conf.getTableInfo().getTableName();
-      if (fullName != null) {
-        suffix = suffix + "_" + fullName.toLowerCase();
-      }
-
-      statsMap.put(Counter.RECORDS_OUT + "_" + suffix, row_count);
+      statsMap.put(getCounterName(Counter.RECORDS_OUT), row_count);
     } catch (HiveException e) {
       throw e;
     } catch (Exception e) {
@@ -445,6 +439,15 @@ protected void initializeOp(Configuration hconf) throws HiveException {
     }
   }
 
+  public String getCounterName(Counter counter) {
+    String suffix = Integer.toString(conf.getDestTableId());
+    String fullName = conf.getTableInfo().getTableName();
+    if (fullName != null) {
+      suffix = suffix + "_" + fullName.toLowerCase();
+    }
+    return counter + "_" + suffix;
+  }
+
   private void logOutputFormatError(Configuration hconf, HiveException ex) {
     StringWriter errorWriter = new StringWriter();
     errorWriter.append("Failed to create output format; configuration: ");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
index ef5ee95..f4730ec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
@@ -164,11 +164,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
       cntr = 1;
       logEveryNRows = HiveConf.getLongVar(hconf, HiveConf.ConfVars.HIVE_LOG_N_RECORDS);
 
-      String context = hconf.get(Operator.CONTEXT_NAME_KEY, "");
-      if (context != null && !context.isEmpty()) {
-        context = "_" + context.replace(" ","_");
-      }
-      statsMap.put(Counter.RECORDS_OUT_INTERMEDIATE + context, recordCounter);
+      statsMap.put(getCounterName(Counter.RECORDS_OUT_INTERMEDIATE, hconf), recordCounter);
 
       List<ExprNodeDesc> keys = conf.getKeyCols();
@@ -250,6 +246,14 @@ protected void initializeOp(Configuration hconf) throws HiveException {
     }
   }
 
+  public String getCounterName(Counter counter, Configuration hconf) {
+    String context = hconf.get(Operator.CONTEXT_NAME_KEY, "");
+    if (context != null && !context.isEmpty()) {
+      context = "_" + context.replace(" ", "_");
+    }
+    return counter + context;
+  }
+
   /**
    * Initializes array of ExprNodeEvaluator. Adds Union field for distinct
    * column indices for group by.
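Reviewer note (illustration, not part of the patch): the two new getCounterName helpers only centralize the name-building logic that initializeOp already performed inline, so the generated counter names are unchanged. A minimal sketch of the resulting name shapes, assuming hypothetical values -- a destination table id of 1, a table named "default.dst", an Operator.CONTEXT_NAME_KEY value of "Map 1" -- and assuming fileSink/reduceSink are initialized operator instances and hconf is the job Configuration:

    // FileSinkOperator: <counter>_<destTableId>[_<table name, lowercased>]
    String out = fileSink.getCounterName(FileSinkOperator.Counter.RECORDS_OUT);
    // -> "RECORDS_OUT_1_default.dst"

    // ReduceSinkOperator: <counter>[_<context, with spaces replaced by "_">]
    String intermediate =
        reduceSink.getCounterName(ReduceSinkOperator.Counter.RECORDS_OUT_INTERMEDIATE, hconf);
    // -> "RECORDS_OUT_INTERMEDIATE_Map_1"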
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index 336d490..eaeffee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -384,11 +384,11 @@ private static StatsTask getStatsTaskInChildTasks(Task<? extends Serializable> rootTask) {
     for (Operator<? extends OperatorDesc> operator : work.getAllOperators()) {
       if (operator instanceof FileSinkOperator) {
         for (FileSinkOperator.Counter counter : FileSinkOperator.Counter.values()) {
-          hiveCounters.add(counter.toString());
+          hiveCounters.add(((FileSinkOperator) operator).getCounterName(counter));
         }
       } else if (operator instanceof ReduceSinkOperator) {
         for (ReduceSinkOperator.Counter counter : ReduceSinkOperator.Counter.values()) {
-          hiveCounters.add(counter.toString());
+          hiveCounters.add(((ReduceSinkOperator) operator).getCounterName(counter, conf));
         }
       } else if (operator instanceof ScriptOperator) {
         for (ScriptOperator.Counter counter : ScriptOperator.Counter.values()) {
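Reviewer note (illustration, not part of the patch): before this change SparkTask registered only the bare enum names (e.g. "RECORDS_OUT") with the Spark counter collection, while FileSinkOperator and ReduceSinkOperator publish their row counts under the suffixed names built in initializeOp, so the registered keys never matched the keys the operators actually increment. Going through the new helpers keeps both sides on the same key; with the hypothetical values from the sketch above:

    // Key now registered by SparkTask for a FileSinkOperator writing to
    // destination table id 1 / table "default.dst" -- identical to the
    // statsMap key used in FileSinkOperator.initializeOp:
    hiveCounters.add("RECORDS_OUT_1_default.dst");
    // previously registered (and therefore never incremented): "RECORDS_OUT"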