diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
index f483e90..5b6dd6a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
@@ -350,8 +350,10 @@ public void setChildren(Configuration hconf) throws HiveException {
       for (String onealias : aliases) {
         Operator<? extends OperatorDesc> op = conf.getAliasToWork().get(onealias);
-        LOG.info("Adding alias " + onealias + " to work list for file "
-            + onefile);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Adding alias " + onealias + " to work list for file "
+              + onefile);
+        }
         MapInputPath inp = new MapInputPath(onefile, onealias, op, partDesc);
         if (opCtxMap.containsKey(inp)) {
           continue;
         }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 8359484..d565d01 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -298,10 +298,10 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
         }
 
         if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
-          LOG.info("Loading plan from: "+path.toUri().getPath());
+          LOG.debug("Loading plan from: "+path.toUri().getPath());
           String planString = conf.get(path.toUri().getPath());
           if (planString == null) {
-            LOG.info("Could not find plan!");
+            LOG.debug("Could not find plan string in conf");
            return null;
           }
           byte[] planBytes = Base64.decodeBase64(planString);
@@ -332,6 +332,8 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
           }
         }
         gWorkMap.put(path, gWork);
+      } else {
+        LOG.debug("Found plan in cache.");
       }
       return gWork;
     } catch (FileNotFoundException fnf) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
index d40b557..ea771f3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
@@ -72,18 +72,19 @@ void init(JobConf jconf, MRTaskReporter mrReporter, Map in
 
     // Allocate the bean at the beginning
-    memoryMXBean = ManagementFactory.getMemoryMXBean();
-    l4j.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());
 
     isLogInfoEnabled = l4j.isInfoEnabled();
 
     //log classpaths
     try {
-      l4j.info("conf classpath = "
-          + Arrays.asList(((URLClassLoader) jconf.getClassLoader()).getURLs()));
-      l4j.info("thread classpath = "
-          + Arrays.asList(((URLClassLoader) Thread.currentThread()
-          .getContextClassLoader()).getURLs()));
+      if (l4j.isDebugEnabled()) {
+        l4j.debug("conf classpath = "
+            + Arrays.asList(((URLClassLoader) jconf.getClassLoader()).getURLs()));
+        l4j.debug("thread classpath = "
+            + Arrays.asList(((URLClassLoader) Thread.currentThread()
+            .getContextClassLoader()).getURLs()));
+      }
     } catch (Exception e) {
       l4j.info("cannot get classpath: " + e.getMessage());
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
index e60fa9e..661725e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java
@@ -137,7 +137,7 @@ public void assign(VectorExpressionWriter[] writers,
         colNames = String.format("%s %s", colNames, colName);
       }
 
-      LOG.info(String.format("keyObjectInspector [%s]%s => %s",
+      LOG.debug(String.format("keyObjectInspector [%s]%s => %s",
           keyObjectInspector.getClass(),
           keyObjectInspector,
           colNames));
@@ -169,7 +169,7 @@ public void assign(VectorExpressionWriter[] writers,
         colNames = String.format("%s %s", colNames, colName);
       }
 
-      LOG.info(String.format("valueObjectInspector [%s]%s => %s",
+      LOG.debug(String.format("valueObjectInspector [%s]%s => %s",
           valueObjectInspector.getClass(),
           valueObjectInspector,
           colNames));
@@ -198,7 +198,7 @@ public void assign(VectorExpressionWriter[] writers,
   public void processOp(Object row, int tag) throws HiveException {
     VectorizedRowBatch vrg = (VectorizedRowBatch) row;
 
-    LOG.info(String.format("sinking %d rows, %d values, %d keys, %d parts",
+    LOG.debug(String.format("sinking %d rows, %d values, %d keys, %d parts",
         vrg.size,
         valueEval.length,
         keyEval.length,
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 38d8681..55890a7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -185,7 +185,7 @@ public static SearchArgument createSarg(List<OrcProto.Type> types, Configuration
     if (serializedPushdown == null
         || (conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR) == null
             && conf.get(serdeConstants.LIST_COLUMNS) == null)) {
-      LOG.info("No ORC pushdown predicate");
+      LOG.debug("No ORC pushdown predicate");
       return null;
     }
     SearchArgument sarg = SearchArgument.FACTORY.create
@@ -197,7 +197,9 @@ public static SearchArgument createSarg(List<OrcProto.Type> types, Configuration
   public static String[] getIncludedColumnNames(
       List<OrcProto.Type> types, boolean[] includedColumns, Configuration conf) {
     String columnNamesString = conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
-    LOG.info("included columns names = " + columnNamesString);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("included columns names = " + columnNamesString);
+    }
     if (columnNamesString == null || conf.get(TableScanDesc.FILTER_EXPR_CONF_STR) == null) {
       return null;
     }
@@ -508,13 +510,14 @@ private FileInfo verifyCachedFileInfo(FileStatus file) {
       } else {
         // Invalidate
         Context.footerCache.invalidate(file.getPath());
-        LOG.info("Meta-Info for : " + file.getPath() + " changed. CachedModificationTime: "
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Meta-Info for : " + file.getPath() + " changed. CachedModificationTime: "
             + fileInfo.modificationTime + ", CurrentModificationTime: "
             + file.getModificationTime()
            + ", CachedLength: " + fileInfo.size + ", CurrentLength: " + file.getLen());
+        }
       }
     } else {
-      LOG.info("Info not cached for path: " + file.getPath());
       if (LOG.isDebugEnabled()) {
         LOG.debug("Info not cached for path: " + file.getPath());
       }
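
Note on the idiom this patch applies throughout: chatty string-building log calls are demoted from INFO to DEBUG, and where the message is built by concatenation they are additionally wrapped in an isDebugEnabled() guard so the string is never constructed when debug logging is off. A minimal standalone sketch of the guarded pattern, using the commons-logging API these files already use (the class and method names below are illustrative, not part of the patch):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class GuardedDebugLogging {
      private static final Log LOG = LogFactory.getLog(GuardedDebugLogging.class);

      void addAlias(String alias, String file) {
        // Without the guard, the argument string is concatenated even when
        // DEBUG is disabled; with it, the call costs only a boolean check.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Adding alias " + alias + " to work list for file " + file);
        }
      }
    }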