Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -114,6 +114,7 @@
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
+  private static final int SLEEP_TIME = 2000;
 
   private HiveConf conf;
   private DataInput resStream;
@@ -127,7 +128,7 @@
   // A limit on the number of threads that can be launched
   private int maxthreads;
-  private final int sleeptime = 2000;
+  private final int sleeptime = SLEEP_TIME;
   protected int tryCount = Integer.MAX_VALUE;
 
   private boolean checkLockManager() {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java (working copy)
@@ -20,6 +20,8 @@
 
 import java.io.Serializable;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -38,6 +40,8 @@
 
   private static final long serialVersionUID = 1L;
 
+  private static transient final Log LOG = LogFactory.getLog(CopyTask.class);
+
   public CopyTask() {
     super();
   }
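Note on the Driver.java hunk above: the extracted literal 2000 is assigned to sleeptime, a sleep interval in milliseconds used next to checkLockManager(), so the constant is named for the sleep rather than for the thread limit the neighboring comment describes. A minimal sketch of the pattern, with hypothetical class and method names (not Hive APIs):

public class LockPollSketch {
  // 2000 ms between lock-acquisition attempts; the name carries the intent.
  private static final int SLEEP_TIME = 2000;

  void waitForLock() throws InterruptedException {
    while (!tryAcquire()) {
      Thread.sleep(SLEEP_TIME); // unit and purpose are obvious at the call site
    }
  }

  boolean tryAcquire() {
    return true; // placeholder so the sketch is self-contained
  }
}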
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (working copy)
@@ -98,16 +98,18 @@
   public static MemoryMXBean memoryMXBean;
   protected HadoopJobExecHelper jobExecHelper;
 
+  protected static transient final Log LOG = LogFactory.getLog(ExecDriver.class);
+
   /**
    * Constructor when invoked from QL.
    */
   public ExecDriver() {
     super();
-    LOG = LogFactory.getLog(this.getClass().getName());
     console = new LogHelper(LOG);
     this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
   }
 
+  @Override
   public boolean requireLock() {
     return true;
   }
@@ -176,7 +178,6 @@
   public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
     setWork(plan);
     this.job = job;
-    LOG = LogFactory.getLog(this.getClass().getName());
     console = new LogHelper(LOG, isSilent);
     this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java (working copy)
@@ -23,6 +23,8 @@
 import java.util.ArrayList;
 import java.util.Properties;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -52,6 +54,7 @@
   private FetchOperator ftOp;
   private SerDe mSerde;
   private int totalRows;
+  private static transient final Log LOG = LogFactory.getLog(FetchTask.class);
 
   public FetchTask() {
     super();
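The ExecDriver and FetchTask hunks show the logging cleanup this patch applies throughout: stop assigning a shared static Log from instance constructors and initialize one per-class logger at class-load time. A before/after sketch with hypothetical class names:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

class BeforeSketch {
  protected static Log LOG; // shared, mutable across the whole hierarchy

  BeforeSketch() {
    // Every construction rewrites the shared static field: the last
    // instance constructed wins, and concurrent construction races.
    LOG = LogFactory.getLog(this.getClass().getName());
  }
}

class AfterSketch {
  // Resolved once when the class loads; never reassigned.
  private static final Log LOG = LogFactory.getLog(AfterSketch.class);
}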
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -20,7 +20,6 @@
 
 import java.lang.reflect.Method;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -122,7 +121,6 @@
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCollectSet;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFContextNGrams;
@@ -130,6 +128,7 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovariance;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovarianceSample;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFHistogramNumeric;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
@@ -147,13 +146,13 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcatWS;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapEmpty;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFEWAHBitmapOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFField;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
@@ -210,7 +209,7 @@
  */
 public final class FunctionRegistry {
 
-  private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.exec.FunctionRegistry");
+  private static Log LOG = LogFactory.getLog(FunctionRegistry.class);
 
   /**
    * The mapping from expression function names to expression classes.
@@ -707,7 +706,7 @@
 
     List<Method> mlist = new ArrayList<Method>();
 
-    for (Method m : Arrays.asList(udfClass.getMethods())) {
+    for (Method m : udfClass.getMethods()) {
       if (m.getName().equals(methodName)) {
         mlist.add(m);
       }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (working copy)
@@ -22,8 +22,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
@@ -42,7 +42,7 @@
  */
 public class FunctionTask extends Task<FunctionWork> {
   private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.FunctionTask");
+  private static transient final Log LOG = LogFactory.getLog(FunctionTask.class);
 
   transient HiveConf conf;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (working copy)
@@ -64,11 +64,12 @@
 
   private Map<String, FetchOperator> fetchOperators;
   private JobConf job;
-  public static final Log l4j = LogFactory.getLog("MapredLocalTask");
+  public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class);
   static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE";
   static final String HADOOP_OPTS_KEY = "HADOOP_OPTS";
   static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive"};
   public static MemoryMXBean memoryMXBean;
+  private static final Log LOG = LogFactory.getLog(MapredLocalTask.class);
 
   // not sure we need this exec context; but all the operators in the work
   // will pass this context throught
@@ -81,7 +82,6 @@
   public MapredLocalTask(MapredLocalWork plan, JobConf job, boolean isSilent) throws HiveException {
     setWork(plan);
     this.job = job;
-    LOG = LogFactory.getLog(this.getClass().getName());
     console = new LogHelper(LOG, isSilent);
   }
 
@@ -97,6 +97,7 @@
     return sdf.format(cal.getTime());
   }
 
+  @Override
   public boolean requireLock() {
     return true;
   }
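On the FunctionRegistry loop change: Java's enhanced for statement iterates arrays directly, so wrapping udfClass.getMethods() in Arrays.asList() only added a wrapper allocation and an import. A self-contained sketch of the simplified form:

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

public class MethodScanSketch {
  // Collects the public methods with a given name, mirroring the
  // simplified FunctionRegistry loop.
  public static List<Method> methodsNamed(Class<?> udfClass, String methodName) {
    List<Method> mlist = new ArrayList<Method>();
    for (Method m : udfClass.getMethods()) { // arrays are valid for-each sources
      if (m.getName().equals(methodName)) {
        mlist.add(m);
      }
    }
    return mlist;
  }
}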
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy)
@@ -28,6 +28,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -35,8 +37,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -56,6 +58,7 @@
 public class MoveTask extends Task<MoveWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
+  private static transient final Log LOG = LogFactory.getLog(MoveTask.class);
 
   public MoveTask() {
     super();
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy)
@@ -26,6 +26,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,6 +56,7 @@
 public class StatsTask extends Task<StatsWork> implements Serializable {
 
   private static final long serialVersionUID = 1L;
+  private static transient final Log LOG = LogFactory.getLog(StatsTask.class);
 
   private Table table;
   private List<LinkedHashMap<String, String>> dpPartSpecs;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (working copy)
@@ -53,7 +53,6 @@
   protected transient boolean queued;
   protected transient HiveConf conf;
   protected transient Hive db;
-  protected static transient Log LOG;
   protected transient LogHelper console;
   protected transient QueryPlan queryPlan;
   protected transient TaskHandle taskHandle;
@@ -62,6 +61,7 @@
   protected transient boolean clonedConf = false;
   protected Task<? extends Serializable> backupTask;
   protected List<Task<? extends Serializable>> backupChildrenTasks = new ArrayList<Task<? extends Serializable>>();
+  protected static transient Log LOG = LogFactory.getLog(Task.class);
   protected int taskTag;
   private boolean isLocalMode =false;
   private boolean retryCmdWhenFail = false;
@@ -91,7 +91,6 @@
     started = false;
     initialized = false;
     queued = false;
-    LOG = LogFactory.getLog(this.getClass().getName());
     this.taskCounters = new HashMap<String, Long>();
     taskTag = Task.NO_TAG;
   }
@@ -474,7 +473,7 @@
   public void setLocalMode(boolean isLocalMode) {
     this.isLocalMode = isLocalMode;
   }
-  
+
   public boolean requireLock() {
     return false;
   }
@@ -486,11 +485,11 @@
   public void setRetryCmdWhenFail(boolean retryCmdWhenFail) {
     this.retryCmdWhenFail = retryCmdWhenFail;
   }
-  
+
   public QueryPlan getQueryPlan() {
     return queryPlan;
   }
-  
+
   public void setQueryPlan(QueryPlan queryPlan) {
     this.queryPlan = queryPlan;
   }
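The Task.java change initializes the base class's LOG at declaration instead of in each constructor, which is also why CopyTask, FetchTask, MoveTask, and StatsTask each gain a logger of their own: a static field in the base class is shared by every subclass, so a task without its own field logs under the base class's category. A sketch with hypothetical classes:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

class BaseTaskSketch {
  // Shared by every subclass that does not declare its own LOG.
  protected static Log LOG = LogFactory.getLog(BaseTaskSketch.class);
}

class ConcreteTaskSketch extends BaseTaskSketch {
  // Hides BaseTaskSketch.LOG, so this class's log lines are attributed
  // to its own category instead of the base class's.
  private static final Log LOG = LogFactory.getLog(ConcreteTaskSketch.class);
}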
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (working copy)
@@ -87,6 +87,16 @@
 
   private static final Log LOG = LogFactory.getLog(MapJoinProcessor.class.getName());
 
+  private static final RuleRegExp MAPJOIN_RULE_REG_EXP = new RuleRegExp("R0", "MAPJOIN%");
+
+  private static final RuleRegExp MAPJOIN_FS_RULE_REG_EXP = new RuleRegExp("R1", "MAPJOIN%.*FS%");
+
+  private static final RuleRegExp MAPJOIN_RS_RULE_REG_EXP = new RuleRegExp("R2", "MAPJOIN%.*RS%");
+
+  private static final RuleRegExp MAPJOIN_MAPJOIN_RULE_REG_EXP = new RuleRegExp("R3", "MAPJOIN%.*MAPJOIN%");
+
+  private static final RuleRegExp MAPJOIN_UNION_RULE_REG_EXP = new RuleRegExp("R4", "MAPJOIN%.*UNION%");
+
   private ParseContext pGraphContext;
 
   /**
@@ -696,10 +706,10 @@
     // the operator stack.
     // The dispatcher generates the plan from the operator tree
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
-    opRules.put(new RuleRegExp("R0", "MAPJOIN%"), getCurrentMapJoin());
-    opRules.put(new RuleRegExp("R1", "MAPJOIN%.*FS%"), getMapJoinFS());
-    opRules.put(new RuleRegExp("R2", "MAPJOIN%.*RS%"), getMapJoinDefault());
-    opRules.put(new RuleRegExp("R4", "MAPJOIN%.*UNION%"), getMapJoinDefault());
+    opRules.put(MAPJOIN_RULE_REG_EXP, getCurrentMapJoin());
+    opRules.put(MAPJOIN_FS_RULE_REG_EXP, getMapJoinFS());
+    opRules.put(MAPJOIN_RS_RULE_REG_EXP, getMapJoinDefault());
+    opRules.put(MAPJOIN_UNION_RULE_REG_EXP, getMapJoinDefault());
 
     // The dispatcher fires the processor corresponding to the closest matching
     // rule and passes the context along
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -26,7 +26,6 @@
 
 import java.io.Serializable;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -121,7 +120,7 @@
  *
 */
 public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
-  private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer");
+  private static final Log LOG = LogFactory.getLog(DDLSemanticAnalyzer.class);
   private static final Map<Integer, String> TokenToTypeName = new HashMap<Integer, String>();
 
   private final Set<String> reservedPartitionValues;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (revision 1145025)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (working copy)
@@ -36,7 +36,7 @@
  */
 public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Log LOG = LogFactory
-      .getLog("hive.ql.parse.FunctionSemanticAnalyzer");
+      .getLog(FunctionSemanticAnalyzer.class);
 
   public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
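The MapJoinProcessor hunk hoists the RuleRegExp instances into static finals so the immutable rule objects are built once per class instead of once per optimizer pass. A stand-alone sketch of the pattern; Rule here is a stand-in for RuleRegExp and Runnable a stand-in for NodeProcessor, so the sketch compiles without Hive on the classpath:

import java.util.LinkedHashMap;
import java.util.Map;

public class RuleHoistSketch {
  // Stand-in for RuleRegExp: an immutable (name, pattern) pair.
  static final class Rule {
    final String name;
    final String pattern;

    Rule(String name, String pattern) {
      this.name = name;
      this.pattern = pattern;
    }
  }

  // Built once at class-load time and reused by every call below.
  private static final Rule MAPJOIN_RULE = new Rule("R0", "MAPJOIN%");
  private static final Rule MAPJOIN_FS_RULE = new Rule("R1", "MAPJOIN%.*FS%");

  Map<Rule, Runnable> buildRules(Runnable currentMapJoin, Runnable mapJoinFS) {
    // LinkedHashMap keeps insertion order, which matters for rule dispatch.
    Map<Rule, Runnable> opRules = new LinkedHashMap<Rule, Runnable>();
    opRules.put(MAPJOIN_RULE, currentMapJoin);
    opRules.put(MAPJOIN_FS_RULE, mapJoinFS);
    return opRules;
  }
}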