diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 62374ad..df1c5a6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -213,9 +213,7 @@ private boolean validateMRTask(MapRedTask mrTask) throws SemanticException { List<Class<?>> interfaceList = Arrays.asList(pd.getInputFileFormatClass().getInterfaces()); if (!interfaceList.contains(VectorizedInputFormatInterface.class)) { - LOG.debug("Input format: " + pd.getInputFileFormatClassName() - + ", doesn't provide vectorized input"); - System.err.println("Input format: " + pd.getInputFileFormatClassName() + LOG.info("Input format: " + pd.getInputFileFormatClassName() + ", doesn't provide vectorized input"); return false; } @@ -244,7 +242,7 @@ private boolean validateMRTask(MapRedTask mrTask) throws SemanticException { } private void vectorizeMRTask(MapRedTask mrTask) throws SemanticException { - System.err.println("Going down the vectorized path"); + LOG.info("Vectorizing task..."); MapWork mapWork = mrTask.getWork().getMapWork(); mapWork.setVectorMode(true); Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); @@ -278,7 +276,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, } boolean ret = validateOperator(op); if (!ret) { - System.err.println("Operator: "+op.getName()+", could not be vectorized"); + LOG.info("Operator: "+op.getName()+" could not be vectorized."); return new Boolean(false); } }