Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1029845) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -200,6 +200,8 @@ HIVEMAPJOINBUCKETCACHESIZE("hive.mapjoin.bucket.cache.size", 100), HIVEMAPJOINROWSIZE("hive.mapjoin.size.key", 10000), HIVEMAPJOINCACHEROWS("hive.mapjoin.cache.numrows", 25000), + HIVEMAPJOINHASHINITCAPACITY("hive.mapjoin.hash.initcapacity", 1000000), + HIVEMAPJOINHASHLOADFACTOR("hive.mapjoin.hash.loadfactor", (float) 0.75), HIVEGROUPBYMAPINTERVAL("hive.groupby.mapaggr.checkinterval", 100000), HIVEMAPAGGRHASHMEMORY("hive.map.aggr.hash.percentmemory", (float) 0.5), HIVEMAPAGGRHASHMINREDUCTION("hive.map.aggr.hash.min.reduction", (float) 0.5), @@ -229,7 +231,7 @@ HIVEHWIWARFILE("hive.hwi.war.file", System.getenv("HWI_WAR_FILE")), // mapper/reducer memory in local mode - HIVEHADOOPMAXMEM("hive.mapred.local.mem", 0), + HIVEHADOOPMAXMEM("hive.mapred.local.mem", 1600), // test mode in hive mode HIVETESTMODE("hive.test.mode", false), Index: ql/src/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java =================================================================== --- ql/src/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (revision 1029845) +++ ql/src/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (working copy) @@ -6,12 +6,10 @@ package org.apache.hadoop.hive.ql.plan.api; -import java.util.Set; -import java.util.HashSet; -import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + import org.apache.thrift.IntRangeSet; -import java.util.Map; -import java.util.HashMap; public class OperatorType { public static final int JOIN = 0; @@ -30,8 +28,8 @@ public static final int UDTF = 13; public static final int LATERALVIEWJOIN = 14; public static final int LATERALVIEWFORWARD = 15; - public static final int JDBMDUMMY = 16; - public static final int JDBMSINK = 17; + public static final int HASHTABLEDUMMY = 16; + public static final int HASHTABLESINK = 17; public static final IntRangeSet VALID_VALUES = new IntRangeSet( JOIN, @@ -50,8 +48,8 @@ UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, - JDBMDUMMY, - JDBMSINK); + HASHTABLEDUMMY, + HASHTABLESINK); public static final Map VALUES_TO_NAMES = new HashMap() {{ put(JOIN, "JOIN"); @@ -70,7 +68,7 @@ put(UDTF, "UDTF"); put(LATERALVIEWJOIN, "LATERALVIEWJOIN"); put(LATERALVIEWFORWARD, "LATERALVIEWFORWARD"); - put(JDBMDUMMY, "JDBMDUMMY"); - put(JDBMSINK, "JDBMSINK"); + put(HASHTABLEDUMMY, "HASHTABLEDUMMY"); + put(HASHTABLESINK, "HASHTABLESINK"); }}; } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java (working copy) @@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; @@ -69,7 +70,6 @@ transient boolean firstRow; - private static final int NOTSKIPBIGTABLE = -1; public AbstractMapJoinOperator() { } @@ -149,4 +149,21 @@ return false; } + // 
returns true if there are elements in key list and any of them is null
+  protected boolean hasAnyNulls(Object[] key) {
+    if (key != null && key.length > 0) {
+      for (Object k : key) {
+        if (k == null) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  // returns true if there are elements in key list and any of them is null
+  protected boolean hasAnyNulls(AbstractMapJoinKey key) {
+    return key.hasAnyNulls();
+  }
+
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (revision 1029845)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (working copy)
@@ -30,6 +30,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer;
 import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
@@ -143,7 +144,7 @@
   // input is too large
   // to fit in memory
-  HashMap<Byte, RowContainer<ArrayList<Object>>> storage; // map b/w table alias
+  HashMap<Byte, AbstractRowContainer<ArrayList<Object>>> storage; // map b/w table alias
   // to RowContainer
   int joinEmitInterval = -1;
   int joinCacheSize = 0;
@@ -154,7 +155,7 @@
   protected transient int countAfterReport;
   protected transient int heartbeatInterval;

-  private static final int NOTSKIPBIGTABLE = -1;
+  protected static final int NOTSKIPBIGTABLE = -1;

   public CommonJoinOperator() {
   }
@@ -228,7 +229,7 @@
     totalSz = 0;

     // Map that contains the rows for each alias
-    storage = new HashMap<Byte, RowContainer<ArrayList<Object>>>();
+    storage = new HashMap<Byte, AbstractRowContainer<ArrayList<Object>>>();

     numAliases = conf.getExprs().size();
@@ -340,7 +341,7 @@
   public void startGroup() throws HiveException {
     LOG.trace("Join: Starting new group");
     newGroupStarted = true;
-    for (RowContainer<ArrayList<Object>> alw : storage.values()) {
+    for (AbstractRowContainer<ArrayList<Object>> alw : storage.values()) {
       alw.clear();
     }
   }
@@ -664,7 +665,7 @@
     if (aliasNum < numAliases) {
       // search for match in the rhs table
-      RowContainer<ArrayList<Object>> aliasRes = storage.get(order[aliasNum]);
+      AbstractRowContainer<ArrayList<Object>> aliasRes = storage.get(order[aliasNum]);

       for (ArrayList<Object> newObj = aliasRes.first(); newObj != null; newObj = aliasRes
           .next()) {
@@ -735,7 +736,7 @@
       return;
     }

-    RowContainer<ArrayList<Object>> alias = storage.get(order[aliasNum]);
+    AbstractRowContainer<ArrayList<Object>> alias = storage.get(order[aliasNum]);
     for (ArrayList<Object> row = alias.first(); row != null; row = alias.next()) {
       intObj.pushObj(row);
       genUniqueJoinObject(aliasNum + 1, intObj);
@@ -755,7 +756,7 @@
     boolean hasNulls = false; // Will be true if there are null entries
     for (int i = 0; i < numAliases; i++) {
       Byte alias = order[i];
-      RowContainer<ArrayList<Object>> alw = storage.get(alias);
+      AbstractRowContainer<ArrayList<Object>> alw = storage.get(alias);
       if (alw.size() == 0) {
         alw.add((ArrayList<Object>) dummyObj[i]);
         hasNulls = true;
@@ -776,7 +777,7 @@
     // does any result need to be emitted
     for (int i = 0; i < numAliases; i++) {
       Byte alias = order[i];
-      RowContainer<ArrayList<Object>> alw = storage.get(alias);
+      AbstractRowContainer<ArrayList<Object>> alw = storage.get(alias);
       if (alw.size() == 0) {
         if (noOuterJoin) {
           LOG.trace("No data for alias=" + i);
@@ -831,7 +832,7 @@
   @Override
   public void closeOp(boolean abort) throws HiveException {
     LOG.trace("Join Op close");
-    for (RowContainer<ArrayList<Object>> alw : storage.values()) {
+    for (AbstractRowContainer<ArrayList<Object>> alw : storage.values()) {
       if (alw != null) {
         alw.clear(); // clean up the temp files
       }
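The hunks above switch CommonJoinOperator from the concrete RowContainer to the AbstractRowContainer contract (the new class appears later in this patch), so per-alias storage can be backed either by the spill-capable RowContainer or by an in-memory container. As an illustration of that contract, a minimal in-memory implementation could look like the sketch below. This is a sketch only: ListRowContainer is a hypothetical name, not part of this patch; the patch's actual in-memory implementation is MapJoinRowContainer.

import java.util.ArrayList;
import org.apache.hadoop.hive.ql.metadata.HiveException;

// Hypothetical, for illustration: the smallest possible in-memory
// AbstractRowContainer. first() restarts iteration and next() returns null
// at the end, which is exactly what the genObject()/genUniqueJoinObject()
// loops above rely on.
public class ListRowContainer<Row> extends AbstractRowContainer<Row> {
  private final ArrayList<Row> rows = new ArrayList<Row>();
  private int cursor = 0;

  @Override
  public void add(Row t) throws HiveException {
    rows.add(t);
  }

  @Override
  public Row first() throws HiveException {
    cursor = 0; // restart iteration from the beginning
    return next();
  }

  @Override
  public Row next() throws HiveException {
    return cursor < rows.size() ? rows.get(cursor++) : null; // null ends the loop
  }

  @Override
  public int size() {
    return rows.size();
  }

  @Override
  public void clear() throws HiveException {
    rows.clear();
    cursor = 0;
  }
}

Index: 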
ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (working copy) @@ -21,12 +21,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.JDBMDummyDesc; +import org.apache.hadoop.hive.ql.plan.HashTableDummyDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.serde2.Deserializer; -public class JDBMDummyOperator extends Operator implements Serializable{ +public class HashTableDummyOperator extends Operator implements Serializable{ @Override protected void initializeOp(Configuration hconf) throws HiveException { @@ -55,12 +55,12 @@ } @Override public String getName() { - return "JDBMDUMMY"; + return "HASHTABLEDUMMY"; } @Override public int getType() { - return OperatorType.JDBMDUMMY; + return OperatorType.HASHTABLEDUMMY; } } Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java ___________________________________________________________________ Added: svn:mergeinfo Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (working copy) @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hive.ql.exec; -import java.io.IOException; +import java.io.File; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; @@ -31,17 +31,20 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; -import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectKey; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectValue; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.JDBMSinkDesc; +import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; +import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.ql.util.JoinUtil; +import org.apache.hadoop.hive.ql.util.PathUtil; +import org.apache.hadoop.hive.ql.util.TimeUtil; import org.apache.hadoop.hive.serde2.SerDe; -import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; @@ -49,10 +52,10 @@ import org.apache.hadoop.util.ReflectionUtils; -public class JDBMSinkOperator extends TerminalOperator +public class HashTableSinkOperator extends TerminalOperator implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = 
LogFactory.getLog(JDBMSinkOperator.class + private static final Log LOG = LogFactory.getLog(HashTableSinkOperator.class .getName()); //from abstract map join operator @@ -110,10 +113,16 @@ protected transient Byte alias; protected transient Map spillTableDesc; // spill tables are - protected transient Map> mapJoinTables; + protected transient Map> mapJoinTables; protected transient boolean noOuterJoin; - public static class JDBMSinkObjectCtx { + private long rowNumber=0; + protected transient LogHelper console; + + private boolean isAbort; + + + public static class HashTableSinkObjectCtx { ObjectInspector standardOI; SerDe serde; TableDesc tblDesc; @@ -123,7 +132,7 @@ * @param standardOI * @param serde */ - public JDBMSinkObjectCtx(ObjectInspector standardOI, SerDe serde, + public HashTableSinkObjectCtx(ObjectInspector standardOI, SerDe serde, TableDesc tblDesc, Configuration conf) { this.standardOI = standardOI; this.serde = serde; @@ -160,17 +169,21 @@ "Mapside join size exceeds hive.mapjoin.maxsize. " + "Please increase that or remove the mapjoin hint." }; - transient int metadataKeyTag; + private final int metadataKeyTag = -1; transient int[] metadataValueTag; transient int maxMapJoinSize; - public JDBMSinkOperator(){ + public HashTableSinkOperator(){ //super(); + console = new LogHelper(LOG); + isAbort=false; } - public JDBMSinkOperator(MapJoinOperator mjop){ - this.conf = new JDBMSinkDesc(mjop.getConf()); + public HashTableSinkOperator(MapJoinOperator mjop){ + this.conf = new HashTableSinkDesc(mjop.getConf()); + console = new LogHelper(LOG); + isAbort= false; } @@ -214,9 +227,6 @@ JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(),order,posBigTableAlias); joinFilterObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues,inputObjInspectors,posBigTableAlias); - - - if (noOuterJoin) { rowContainerStandardObjectInspectors = joinValuesStandardObjectInspectors; } else { @@ -242,7 +252,7 @@ metadataValueTag[pos] = -1; } - mapJoinTables = new HashMap>(); + mapJoinTables = new HashMap>(); // initialize the hash tables for other tables for (Byte pos:order) { @@ -250,10 +260,14 @@ continue; } - int cacheSize = HiveConf.getIntVar(hconf, - HiveConf.ConfVars.HIVEMAPJOINCACHEROWS); - HashMapWrapper hashTable = new HashMapWrapper( - cacheSize); + int capacity = HiveConf.getIntVar(hconf, + HiveConf.ConfVars.HIVEMAPJOINHASHINITCAPACITY); + float loadFactor = HiveConf.getFloatVar(hconf, + HiveConf.ConfVars.HIVEMAPJOINHASHLOADFACTOR); + + HashMapWrapper hashTable = + new HashMapWrapper(capacity,loadFactor); + mapJoinTables.put(pos, hashTable); } @@ -280,6 +294,22 @@ } + public void generateMapMetaData() throws Exception{ + TableDesc keyTableDesc = conf.getKeyTblDesc(); + SerDe keySerializer = (SerDe) ReflectionUtils.newInstance( + keyTableDesc.getDeserializerClass(), null); + keySerializer.initialize(null, keyTableDesc.getProperties()); + + MapJoinMetaData.clear(); + MapJoinMetaData.put(Integer.valueOf(metadataKeyTag), + new HashTableSinkObjectCtx( + ObjectInspectorUtils + .getStandardObjectInspector(keySerializer + .getObjectInspector(), + ObjectInspectorCopyOption.WRITABLE), keySerializer, + keyTableDesc, hconf)); + } + /* * This operator only process small tables * Read the key/value pairs @@ -289,106 +319,70 @@ public void processOp(Object row, int tag) throws HiveException{ //let the mapJoinOp process these small tables try{ + if(firstRow){ + //generate the map metadata + generateMapMetaData(); + firstRow = false; + } alias = order[tag]; //alias = (byte)tag; // compute 
keys and values as StandardObjects - ArrayList key = JoinUtil.computeKeys(row, joinKeys.get(alias), + AbstractMapJoinKey keyMap= JoinUtil.computeMapJoinKeys(row, joinKeys.get(alias), joinKeysObjectInspectors.get(alias)); - ArrayList value = JoinUtil.computeValues(row, joinValues.get(alias), + Object[] value = JoinUtil.computeMapJoinValues(row, joinValues.get(alias), joinValuesObjectInspectors.get(alias),joinFilters.get(alias), joinFilterObjectInspectors.get(alias), noOuterJoin); - if (firstRow) { - metadataKeyTag = -1; + HashMapWrapper hashTable = mapJoinTables.get((byte) tag); - TableDesc keyTableDesc = conf.getKeyTblDesc(); - SerDe keySerializer = (SerDe) ReflectionUtils.newInstance( - keyTableDesc.getDeserializerClass(), null); - keySerializer.initialize(null, keyTableDesc.getProperties()); - - MapJoinMetaData.clear(); - MapJoinMetaData.put(Integer.valueOf(metadataKeyTag), - new JDBMSinkObjectCtx( - ObjectInspectorUtils - .getStandardObjectInspector(keySerializer - .getObjectInspector(), - ObjectInspectorCopyOption.WRITABLE), keySerializer, - keyTableDesc, hconf)); - - firstRow = false; - } - - numMapRowsRead++; - - if ((numMapRowsRead > maxMapJoinSize)&& (counterNameToEnum != null)) { - // update counter - LOG - .warn("Too many rows in map join tables. Fatal error counter will be incremented!!"); - incrCounter(fatalErrorCntr, 1); - fatalError = true; - return; - } - - HashMapWrapper hashTable = mapJoinTables.get((byte) tag); - MapJoinObjectKey keyMap = new MapJoinObjectKey(metadataKeyTag, key); MapJoinObjectValue o = hashTable.get(keyMap); - RowContainer res = null; + MapJoinRowContainer res = null; boolean needNewKey = true; if (o == null) { int bucketSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEMAPJOINBUCKETCACHESIZE); - res = JoinUtil.getRowContainer(hconf, - rowContainerStandardObjectInspectors.get((byte)tag), - order[tag], bucketSize,spillTableDesc,conf,noOuterJoin); + res = new MapJoinRowContainer(); res.add(value); - } else { - res = o.getObj(); - res.add(value); + + if (metadataValueTag[tag] == -1) { + metadataValueTag[tag] = order[tag]; + + TableDesc valueTableDesc = conf.getValueTblDescs().get(tag); + SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc + .getDeserializerClass(), null); + valueSerDe.initialize(null, valueTableDesc.getProperties()); - if (hashTable.cacheSize() > 0) { - o.setObj(res); - needNewKey = false; + MapJoinMetaData.put(Integer.valueOf(metadataValueTag[tag]), + new HashTableSinkObjectCtx(ObjectInspectorUtils + .getStandardObjectInspector(valueSerDe.getObjectInspector(), + ObjectInspectorCopyOption.WRITABLE), valueSerDe, + valueTableDesc, hconf)); } - } + + // Construct externalizable objects for key and value + if (needNewKey) { + MapJoinObjectValue valueObj = new MapJoinObjectValue( + metadataValueTag[tag], res); - if (metadataValueTag[tag] == -1) { - metadataValueTag[tag] = order[tag]; + rowNumber++; + isAbort = hashTable.put(keyMap, valueObj); + } - TableDesc valueTableDesc = conf.getValueTblDescs().get(tag); - SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc - .getDeserializerClass(), null); - valueSerDe.initialize(null, valueTableDesc.getProperties()); + } else { + res = o.getObj(); + res.add(value); + } - MapJoinMetaData.put(Integer.valueOf(metadataValueTag[tag]), - new JDBMSinkObjectCtx(ObjectInspectorUtils - .getStandardObjectInspector(valueSerDe.getObjectInspector(), - ObjectInspectorCopyOption.WRITABLE), valueSerDe, - valueTableDesc, hconf)); - } + /* if(numMapRowsRead >1000 && 
numMapRowsRead%1000 ==0 ){ + console.printInfo("Processed rows:\t"+numMapRowsRead); - // Construct externalizable objects for key and value - if (needNewKey) { - MapJoinObjectKey keyObj = new MapJoinObjectKey(metadataKeyTag, key); - MapJoinObjectValue valueObj = new MapJoinObjectValue( - metadataValueTag[tag], res); + }*/ - //valueObj.setConf(hconf); - valueObj.setConf(hconf); - // This may potentially increase the size of the hashmap on the mapper - if (res.size() > mapJoinRowsKey) { - if (res.size() % 100 == 0) { - LOG.warn("Number of values for a given key " + keyObj + " are " - + res.size()); - LOG.warn("used memory " + Runtime.getRuntime().totalMemory()); - } - } - hashTable.put(keyObj, valueObj); - } - }catch (SerDeException e) { + }catch (Exception e) { e.printStackTrace(); throw new HiveException(e); } @@ -397,46 +391,51 @@ @Override - /* - * Flush the hashtable into jdbm file - * Load this jdbm file into HDFS only - */ + public void closeOp(boolean abort) throws HiveException{ try{ if(mapJoinTables != null) { //get tmp file URI String tmpURI = this.getExecContext().getLocalWork().getTmpFileURI(); LOG.info("Get TMP URI: "+tmpURI); - - for (Map.Entry> hashTables : mapJoinTables.entrySet()) { + long fileLength; + for (Map.Entry> hashTables : mapJoinTables.entrySet()) { //get the key and value Byte tag = hashTables.getKey(); HashMapWrapper hashTable = hashTables.getValue(); - //get the jdbm file and path - String jdbmFile = hashTable.flushMemoryCacheToPersistent(); - Path localPath = new Path(jdbmFile); - //get current input file name String bigBucketFileName = this.getExecContext().getCurrentBigBucketFile(); if(bigBucketFileName == null ||bigBucketFileName.length()==0) { bigBucketFileName="-"; } //get the tmp URI path; it will be a hdfs path if not local mode - Path tmpURIPath = new Path(tmpURI+Path.SEPARATOR+"-"+tag+"-"+bigBucketFileName+".jdbm"); + String tmpURIPath = PathUtil.generatePath(tmpURI, tag, bigBucketFileName); + console.printInfo(TimeUtil.now()+"\tDump the hashtable into file: "+tmpURIPath); + //get the jdbm file and path + Path path = new Path(tmpURIPath); + FileSystem fs = path.getFileSystem(hconf); + File file = new File(path.toUri().getPath()); + fs.create(path); - //upload jdbm file to this HDFS - FileSystem fs = tmpURIPath.getFileSystem(this.getExecContext().getJc()); - fs.copyFromLocalFile(localPath, tmpURIPath); - LOG.info("Upload 1 JDBM File to: "+tmpURIPath); - //remove the original jdbm tmp file + fileLength= hashTable.flushMemoryCacheToPersistent(file); + console.printInfo(TimeUtil.now()+"\t File size: "+fileLength); + + + LOG.info("Upload 1 File to: "+tmpURIPath); + LOG.info("File Size: "+fileLength); + + console.printInfo("Processing rows: "+rowNumber+"\t key number:"+hashTable.size()); + console.printInfo("Upload 1 File to: "+tmpURIPath); + console.printInfo("File Size: "+fileLength); + hashTable.close(); } } super.closeOp(abort); - }catch(IOException e){ - LOG.error("Copy local file to HDFS error"); + }catch(Exception e){ + LOG.error("Generate Hashtable error"); e.printStackTrace(); } } @@ -453,31 +452,9 @@ @Override public int getType() { - return OperatorType.JDBMSINK; + return OperatorType.HASHTABLESINK; } - private void getPersistentFilePath(Map paths) throws HiveException{ - Map jdbmFilePaths = paths; - try{ - if(mapJoinTables != null) { - for (Map.Entry> hashTables : mapJoinTables.entrySet()) { - //hashTable.close(); - - Byte key = hashTables.getKey(); - HashMapWrapper hashTable = hashTables.getValue(); - - //get the jdbm file and path - String 
jdbmFile = hashTable.flushMemoryCacheToPersistent(); - Path localPath = new Path(jdbmFile); - //insert into map - jdbmFilePaths.put(key, localPath); - } - } - }catch (Exception e){ - LOG.fatal("Get local JDBM file error"); - e.printStackTrace(); - } - } } Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java ___________________________________________________________________ Added: svn:mergeinfo Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMDummyOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMDummyOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMDummyOperator.java (working copy) @@ -1,66 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.exec; - -import java.io.Serializable; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.JDBMDummyDesc; -import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.ql.plan.api.OperatorType; -import org.apache.hadoop.hive.serde2.Deserializer; - -public class JDBMDummyOperator extends Operator implements Serializable{ - - @Override - protected void initializeOp(Configuration hconf) throws HiveException { - TableDesc tbl = this.getConf().getTbl(); - try{ - Deserializer serde = tbl.getDeserializerClass().newInstance(); - serde.initialize(hconf, tbl.getProperties()); - - - this.outputObjInspector = serde.getObjectInspector(); - initializeChildren(hconf); - }catch(Exception e){ - LOG.error("Generating output obj inspector from dummy object error"); - e.printStackTrace(); - } - - } - - @Override - public void processOp(Object row, int tag) throws HiveException{ - throw new HiveException(); - } - - @Override - public void closeOp(boolean abort) throws HiveException{ - } - @Override - public String getName() { - return "JDBMDUMMY"; - } - - @Override - public int getType() { - return OperatorType.JDBMDUMMY; - } - -} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMSinkOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/JDBMSinkOperator.java (working copy) @@ -1,483 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.exec; - -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; -import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectKey; -import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectValue; -import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.JDBMSinkDesc; -import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.ql.plan.api.OperatorType; -import org.apache.hadoop.hive.ql.util.JoinUtil; -import org.apache.hadoop.hive.serde2.SerDe; -import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.util.ReflectionUtils; - - -public class JDBMSinkOperator extends TerminalOperator -implements Serializable { - private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(JDBMSinkOperator.class - .getName()); - - //from abstract map join operator - /** - * The expressions for join inputs's join keys. - */ - protected transient Map> joinKeys; - /** - * The ObjectInspectors for the join inputs's join keys. - */ - protected transient Map> joinKeysObjectInspectors; - /** - * The standard ObjectInspectors for the join inputs's join keys. - */ - protected transient Map> joinKeysStandardObjectInspectors; - - protected transient int posBigTableTag = -1; // one of the tables that is not in memory - protected transient int posBigTableAlias = -1; // one of the tables that is not in memory - transient int mapJoinRowsKey; // rows for a given key - - protected transient RowContainer> emptyList = null; - - transient int numMapRowsRead; - protected transient int totalSz; // total size of the composite object - transient boolean firstRow; - private boolean smallTablesOnly; - /** - * The filters for join - */ - protected transient Map> joinFilters; - - protected transient int numAliases; // number of aliases - /** - * The expressions for join outputs. - */ - protected transient Map> joinValues; - /** - * The ObjectInspectors for the join inputs. 
- */ - protected transient Map> joinValuesObjectInspectors; - /** - * The ObjectInspectors for join filters. - */ - protected transient Map> joinFilterObjectInspectors; - /** - * The standard ObjectInspectors for the join inputs. - */ - protected transient Map> joinValuesStandardObjectInspectors; - - protected transient - Map> rowContainerStandardObjectInspectors; - - protected transient Byte[] order; // order in which the results should - Configuration hconf; - protected transient Byte alias; - protected transient Map spillTableDesc; // spill tables are - - protected transient Map> mapJoinTables; - protected transient boolean noOuterJoin; - - public static class JDBMSinkObjectCtx { - ObjectInspector standardOI; - SerDe serde; - TableDesc tblDesc; - Configuration conf; - - /** - * @param standardOI - * @param serde - */ - public JDBMSinkObjectCtx(ObjectInspector standardOI, SerDe serde, - TableDesc tblDesc, Configuration conf) { - this.standardOI = standardOI; - this.serde = serde; - this.tblDesc = tblDesc; - this.conf = conf; - } - - /** - * @return the standardOI - */ - public ObjectInspector getStandardOI() { - return standardOI; - } - - /** - * @return the serde - */ - public SerDe getSerDe() { - return serde; - } - - public TableDesc getTblDesc() { - return tblDesc; - } - - public Configuration getConf() { - return conf; - } - - } - - private static final transient String[] FATAL_ERR_MSG = { - null, // counter value 0 means no error - "Mapside join size exceeds hive.mapjoin.maxsize. " - + "Please increase that or remove the mapjoin hint." - }; - transient int metadataKeyTag; - transient int[] metadataValueTag; - transient int maxMapJoinSize; - - - public JDBMSinkOperator(){ - //super(); - } - - public JDBMSinkOperator(MapJoinOperator mjop){ - this.conf = new JDBMSinkDesc(mjop.getConf()); - } - - - @Override - protected void initializeOp(Configuration hconf) throws HiveException { - - maxMapJoinSize = HiveConf.getIntVar(hconf, - HiveConf.ConfVars.HIVEMAXMAPJOINSIZE); - - numMapRowsRead = 0; - firstRow = true; - - //for small tables only; so get the big table position first - posBigTableTag = conf.getPosBigTable(); - - order = conf.getTagOrder(); - - posBigTableAlias=order[posBigTableTag]; - - //initialize some variables, which used to be initialized in CommonJoinOperator - numAliases = conf.getExprs().size(); - this.hconf = hconf; - totalSz = 0; - - noOuterJoin = conf.isNoOuterJoin(); - - //process join keys - joinKeys = new HashMap>(); - JoinUtil.populateJoinKeyValue(joinKeys, conf.getKeys(),order,posBigTableAlias); - joinKeysObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinKeys,inputObjInspectors,posBigTableAlias); - joinKeysStandardObjectInspectors = JoinUtil.getStandardObjectInspectors(joinKeysObjectInspectors,posBigTableAlias); - - //process join values - joinValues = new HashMap>(); - JoinUtil.populateJoinKeyValue(joinValues, conf.getExprs(),order,posBigTableAlias); - joinValuesObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues,inputObjInspectors,posBigTableAlias); - joinValuesStandardObjectInspectors = JoinUtil.getStandardObjectInspectors(joinValuesObjectInspectors,posBigTableAlias); - - //process join filters - joinFilters = new HashMap>(); - JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(),order,posBigTableAlias); - joinFilterObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues,inputObjInspectors,posBigTableAlias); - - - - - if (noOuterJoin) { - rowContainerStandardObjectInspectors = 
joinValuesStandardObjectInspectors; - } else { - Map> rowContainerObjectInspectors = - new HashMap>(); - for (Byte alias : order) { - if(alias == posBigTableAlias){ - continue; - } - ArrayList rcOIs = new ArrayList(); - rcOIs.addAll(joinValuesObjectInspectors.get(alias)); - // for each alias, add object inspector for boolean as the last element - rcOIs.add( - PrimitiveObjectInspectorFactory.writableBooleanObjectInspector); - rowContainerObjectInspectors.put(alias, rcOIs); - } - rowContainerStandardObjectInspectors = - getStandardObjectInspectors(rowContainerObjectInspectors); - } - - metadataValueTag = new int[numAliases]; - for (int pos = 0; pos < numAliases; pos++) { - metadataValueTag[pos] = -1; - } - - mapJoinTables = new HashMap>(); - - // initialize the hash tables for other tables - for (Byte pos:order) { - if (pos == posBigTableTag) { - continue; - } - - int cacheSize = HiveConf.getIntVar(hconf, - HiveConf.ConfVars.HIVEMAPJOINCACHEROWS); - HashMapWrapper hashTable = new HashMapWrapper( - cacheSize); - - mapJoinTables.put(pos, hashTable); - } - } - - - - protected static HashMap> getStandardObjectInspectors( - Map> aliasToObjectInspectors) { - HashMap> result = new HashMap>(); - for (Entry> oiEntry : aliasToObjectInspectors - .entrySet()) { - Byte alias = oiEntry.getKey(); - List oiList = oiEntry.getValue(); - ArrayList fieldOIList = new ArrayList( - oiList.size()); - for (int i = 0; i < oiList.size(); i++) { - fieldOIList.add(ObjectInspectorUtils.getStandardObjectInspector(oiList - .get(i), ObjectInspectorCopyOption.WRITABLE)); - } - result.put(alias, fieldOIList); - } - return result; - - } - - /* - * This operator only process small tables - * Read the key/value pairs - * Load them into hashtable - */ - @Override - public void processOp(Object row, int tag) throws HiveException{ - //let the mapJoinOp process these small tables - try{ - alias = order[tag]; - //alias = (byte)tag; - - // compute keys and values as StandardObjects - ArrayList key = JoinUtil.computeKeys(row, joinKeys.get(alias), - joinKeysObjectInspectors.get(alias)); - - ArrayList value = JoinUtil.computeValues(row, joinValues.get(alias), - joinValuesObjectInspectors.get(alias),joinFilters.get(alias), - joinFilterObjectInspectors.get(alias), noOuterJoin); - - - if (firstRow) { - metadataKeyTag = -1; - - TableDesc keyTableDesc = conf.getKeyTblDesc(); - SerDe keySerializer = (SerDe) ReflectionUtils.newInstance( - keyTableDesc.getDeserializerClass(), null); - keySerializer.initialize(null, keyTableDesc.getProperties()); - - MapJoinMetaData.clear(); - MapJoinMetaData.put(Integer.valueOf(metadataKeyTag), - new JDBMSinkObjectCtx( - ObjectInspectorUtils - .getStandardObjectInspector(keySerializer - .getObjectInspector(), - ObjectInspectorCopyOption.WRITABLE), keySerializer, - keyTableDesc, hconf)); - - firstRow = false; - } - - numMapRowsRead++; - - if ((numMapRowsRead > maxMapJoinSize)&& (counterNameToEnum != null)) { - // update counter - LOG - .warn("Too many rows in map join tables. 
Fatal error counter will be incremented!!"); - incrCounter(fatalErrorCntr, 1); - fatalError = true; - return; - } - - HashMapWrapper hashTable = mapJoinTables.get((byte) tag); - MapJoinObjectKey keyMap = new MapJoinObjectKey(metadataKeyTag, key); - MapJoinObjectValue o = hashTable.get(keyMap); - RowContainer res = null; - - boolean needNewKey = true; - if (o == null) { - int bucketSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEMAPJOINBUCKETCACHESIZE); - res = JoinUtil.getRowContainer(hconf, - rowContainerStandardObjectInspectors.get((byte)tag), - order[tag], bucketSize,spillTableDesc,conf,noOuterJoin); - - res.add(value); - } else { - res = o.getObj(); - res.add(value); - - if (hashTable.cacheSize() > 0) { - o.setObj(res); - needNewKey = false; - } - } - - if (metadataValueTag[tag] == -1) { - metadataValueTag[tag] = order[tag]; - - TableDesc valueTableDesc = conf.getValueTblDescs().get(tag); - SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc - .getDeserializerClass(), null); - valueSerDe.initialize(null, valueTableDesc.getProperties()); - - MapJoinMetaData.put(Integer.valueOf(metadataValueTag[tag]), - new JDBMSinkObjectCtx(ObjectInspectorUtils - .getStandardObjectInspector(valueSerDe.getObjectInspector(), - ObjectInspectorCopyOption.WRITABLE), valueSerDe, - valueTableDesc, hconf)); - } - - // Construct externalizable objects for key and value - if (needNewKey) { - MapJoinObjectKey keyObj = new MapJoinObjectKey(metadataKeyTag, key); - MapJoinObjectValue valueObj = new MapJoinObjectValue( - metadataValueTag[tag], res); - - //valueObj.setConf(hconf); - valueObj.setConf(hconf); - // This may potentially increase the size of the hashmap on the mapper - if (res.size() > mapJoinRowsKey) { - if (res.size() % 100 == 0) { - LOG.warn("Number of values for a given key " + keyObj + " are " - + res.size()); - LOG.warn("used memory " + Runtime.getRuntime().totalMemory()); - } - } - hashTable.put(keyObj, valueObj); - } - }catch (SerDeException e) { - e.printStackTrace(); - throw new HiveException(e); - } - - } - - - @Override - /* - * Flush the hashtable into jdbm file - * Load this jdbm file into HDFS only - */ - public void closeOp(boolean abort) throws HiveException{ - try{ - if(mapJoinTables != null) { - //get tmp file URI - String tmpURI = this.getExecContext().getLocalWork().getTmpFileURI(); - LOG.info("Get TMP URI: "+tmpURI); - - for (Map.Entry> hashTables : mapJoinTables.entrySet()) { - //get the key and value - Byte tag = hashTables.getKey(); - HashMapWrapper hashTable = hashTables.getValue(); - - //get the jdbm file and path - String jdbmFile = hashTable.flushMemoryCacheToPersistent(); - Path localPath = new Path(jdbmFile); - - //get current input file name - String bigBucketFileName = this.getExecContext().getCurrentBigBucketFile(); - if(bigBucketFileName == null ||bigBucketFileName.length()==0) { - bigBucketFileName="-"; - } - //get the tmp URI path; it will be a hdfs path if not local mode - Path tmpURIPath = new Path(tmpURI+Path.SEPARATOR+"-"+tag+"-"+bigBucketFileName+".jdbm"); - - //upload jdbm file to this HDFS - FileSystem fs = tmpURIPath.getFileSystem(this.getExecContext().getJc()); - fs.copyFromLocalFile(localPath, tmpURIPath); - LOG.info("Upload 1 JDBM File to: "+tmpURIPath); - //remove the original jdbm tmp file - hashTable.close(); - } - } - - super.closeOp(abort); - }catch(IOException e){ - LOG.error("Copy local file to HDFS error"); - e.printStackTrace(); - } - } - - /** - * Implements the getName function for the Node Interface. 
- * - * @return the name of the operator - */ - @Override - public String getName() { - return "JDBMSINK"; - } - - @Override - public int getType() { - return OperatorType.JDBMSINK; - } - - private void getPersistentFilePath(Map paths) throws HiveException{ - Map jdbmFilePaths = paths; - try{ - if(mapJoinTables != null) { - for (Map.Entry> hashTables : mapJoinTables.entrySet()) { - //hashTable.close(); - - Byte key = hashTables.getKey(); - HashMapWrapper hashTable = hashTables.getValue(); - - //get the jdbm file and path - String jdbmFile = hashTable.flushMemoryCacheToPersistent(); - Path localPath = new Path(jdbmFile); - - //insert into map - jdbmFilePaths.put(key, localPath); - } - } - }catch (Exception e){ - LOG.fatal("Get local JDBM file error"); - e.printStackTrace(); - } - } - -} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinMetaData.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinMetaData.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinMetaData.java (working copy) @@ -17,21 +17,23 @@ */ package org.apache.hadoop.hive.ql.exec; +import java.util.ArrayList; import java.util.HashMap; import java.util.Map; -import org.apache.hadoop.hive.ql.exec.JDBMSinkOperator.JDBMSinkObjectCtx; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; public class MapJoinMetaData { - static transient Map mapMetadata = new HashMap(); + static transient Map mapMetadata = new HashMap(); + static ArrayList list = new ArrayList(); public MapJoinMetaData(){ } - public static void put(Integer key, JDBMSinkObjectCtx value){ + public static void put(Integer key, HashTableSinkObjectCtx value){ mapMetadata.put(key, value); } - public static JDBMSinkObjectCtx get(Integer key){ + public static HashTableSinkObjectCtx get(Integer key){ return mapMetadata.get(key); } @@ -39,4 +41,9 @@ mapMetadata.clear(); } + public static ArrayList getList(){ + list.clear(); + return list; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (working copy) @@ -17,9 +17,7 @@ */ package org.apache.hadoop.hive.ql.exec; -import java.io.File; import java.io.Serializable; -import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; @@ -30,15 +28,17 @@ import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.exec.JDBMSinkOperator.JDBMSinkObjectCtx; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; +import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; -import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectKey; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectValue; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.util.JoinUtil; +import org.apache.hadoop.hive.ql.util.PathUtil; import 
org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -55,7 +55,7 @@ .getName()); - protected transient Map> mapJoinTables; + protected transient Map> mapJoinTables; private static final transient String[] FATAL_ERR_MSG = { null, // counter value 0 means no error @@ -63,9 +63,7 @@ + "Please increase that or remove the mapjoin hint." }; - - - + protected transient Map>> rowContainerMap; transient int metadataKeyTag; transient int[] metadataValueTag; transient int maxMapJoinSize; @@ -93,22 +91,28 @@ metadataKeyTag = -1; bigTableAlias = order[posBigTable]; - mapJoinTables = new HashMap>(); - + mapJoinTables = new HashMap>(); + rowContainerMap = new HashMap>>(); // initialize the hash tables for other tables for (int pos = 0; pos < numAliases; pos++) { if (pos == posBigTable) { continue; } - int cacheSize = HiveConf.getIntVar(hconf, - HiveConf.ConfVars.HIVEMAPJOINCACHEROWS); - HashMapWrapper hashTable = new HashMapWrapper( - cacheSize); + int capacity = HiveConf.getIntVar(hconf, + HiveConf.ConfVars.HIVEMAPJOINHASHINITCAPACITY); + float loadFactor = HiveConf.getFloatVar(hconf, + HiveConf.ConfVars.HIVEMAPJOINHASHLOADFACTOR); + + HashMapWrapper hashTable = + new HashMapWrapper(capacity,loadFactor); mapJoinTables.put(Byte.valueOf((byte) pos), hashTable); + MapJoinRowContainer> rowContainer = new MapJoinRowContainer>(); + rowContainerMap.put(Byte.valueOf((byte) pos), rowContainer); } + } @@ -127,15 +131,15 @@ keyTableDesc.getDeserializerClass(), null); keySerializer.initialize(null, keyTableDesc.getProperties()); MapJoinMetaData.put(Integer.valueOf(metadataKeyTag), - new JDBMSinkObjectCtx( + new HashTableSinkObjectCtx( ObjectInspectorUtils .getStandardObjectInspector(keySerializer .getObjectInspector(), ObjectInspectorCopyOption.WRITABLE), keySerializer, keyTableDesc, hconf)); - //index for values is just alias - for (int tag = 0; tag < order.length; tag++) { + //index for values is just alias + for (int tag = 0; tag < order.length; tag++) { int alias = (int) order[tag]; if(alias == this.bigTableAlias){ @@ -149,7 +153,7 @@ valueSerDe.initialize(null, valueTableDesc.getProperties()); MapJoinMetaData.put(Integer.valueOf(alias), - new JDBMSinkObjectCtx(ObjectInspectorUtils + new HashTableSinkObjectCtx(ObjectInspectorUtils .getStandardObjectInspector(valueSerDe.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE), valueSerDe, valueTableDesc, hconf)); @@ -159,12 +163,13 @@ private void loadJDBM() throws HiveException{ boolean localMode = HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPJT).equals("local"); String tmpURI =null; - HashMapWrapper hashtable; + HashMapWrapper hashtable; Byte pos; int alias; String currentInputFile = HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME); + LOG.info("******* Load from HashTable File: input : "+ currentInputFile); String currentFileName; @@ -173,34 +178,30 @@ } else { currentFileName="-"; } - LOG.info("******* Filename : "+ currentFileName); + try{ if(localMode){ //load the jdbm file from tmp dir + LOG.info("******* Load from tmp file uri ***"); tmpURI= this.getExecContext().getLocalWork().getTmpFileURI(); - for(Map.Entry> entry: mapJoinTables.entrySet()){ + for(Map.Entry> entry: mapJoinTables.entrySet()){ pos = entry.getKey(); hashtable=entry.getValue(); - URI uri = new URI(tmpURI+Path.SEPARATOR+"-"+pos+"-"+currentFileName+".jdbm"); - LOG.info("\tLoad back 1 JDBM file from tmp file uri:"+uri.toString()); - Path path = new 
Path(tmpURI+Path.SEPARATOR+"-"+pos+"-"+currentFileName+".jdbm"); - LOG.info("\tLoad back 1 JDBM file from tmp file uri:"+path.toString()); + String filePath = PathUtil.generatePath(tmpURI, pos, currentFileName); + Path path = new Path(filePath); + LOG.info("\tLoad back 1 hashtable file from tmp file uri:"+path.toString()); - File jdbmFile = new File(path.toUri()); - hashtable.initilizePersistentHash(jdbmFile); + hashtable.initilizePersistentHash(path.toUri().getPath()); } }else{ //load the jdbm file from distributed cache + LOG.info("******* Load from distributed Cache ***:"); Path[] localFiles= DistributedCache.getLocalCacheFiles(this.hconf); - for(int i = 0;i> entry: mapJoinTables.entrySet()){ + for(Map.Entry> entry: mapJoinTables.entrySet()){ pos = entry.getKey(); hashtable=entry.getValue(); - String suffix="-"+pos+"-"+currentFileName+".jdbm"; + String suffix=PathUtil.generateFileName(pos, currentFileName); LOG.info("Looking for jdbm file with suffix: "+suffix); boolean found=false; @@ -209,11 +210,10 @@ if(path.toString().endsWith(suffix)){ LOG.info("Matching suffix with cached file:"+path.toString()); - File jdbmFile = new File(path.toString()); - LOG.info("\tInitializing the JDBM by cached file:"+path.toString()); - hashtable.initilizePersistentHash(jdbmFile); + LOG.info("\tInitializing the hashtable by cached file:"+path.toString()); + hashtable.initilizePersistentHash(path.toString()); found = true; - LOG.info("\tLoad back 1 JDBM file from distributed cache:"+path.toString()); + LOG.info("\tLoad back 1 hashtable file from distributed cache:"+path.toString()); break; } } @@ -222,6 +222,7 @@ throw new HiveException(); } } + LOG.info("******* End of loading *******:"); } }catch (Exception e){ @@ -256,7 +257,7 @@ } // compute keys and values as StandardObjects - ArrayList key = JoinUtil.computeKeys(row, joinKeys.get(alias), + AbstractMapJoinKey key = JoinUtil.computeMapJoinKeys(row, joinKeys.get(alias), joinKeysObjectInspectors.get(alias)); ArrayList value = JoinUtil.computeValues(row, joinValues.get(alias), joinValuesObjectInspectors.get(alias), joinFilters.get(alias), @@ -268,18 +269,20 @@ for (Byte pos : order) { if (pos.intValue() != tag) { - MapJoinObjectKey keyMap = new MapJoinObjectKey(metadataKeyTag, key); - MapJoinObjectValue o = mapJoinTables.get(pos).getMapJoinValueObject(keyMap); + + MapJoinObjectValue o = mapJoinTables.get(pos).get(key); + MapJoinRowContainer> rowContainer = rowContainerMap.get(pos); // there is no join-value or join-key has all null elements - if (o == null || (hasAnyNulls(key))) { + if (o == null ||key.hasAnyNulls()) { if (noOuterJoin) { storage.put(pos, emptyList); } else { storage.put(pos, dummyObjVectors[pos.intValue()]); } } else { - storage.put(pos, o.getObj()); + rowContainer.reset(o.getObj()); + storage.put(pos, rowContainer); } } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (working copy) @@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.ForwardDesc; import org.apache.hadoop.hive.ql.plan.GroupByDesc; -import org.apache.hadoop.hive.ql.plan.JDBMDummyDesc; -import org.apache.hadoop.hive.ql.plan.JDBMSinkDesc; +import org.apache.hadoop.hive.ql.plan.HashTableDummyDesc; +import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc; import 
org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.LateralViewForwardDesc; import org.apache.hadoop.hive.ql.plan.LateralViewJoinDesc; @@ -87,10 +87,10 @@ LateralViewJoinOperator.class)); opvec.add(new OpTuple(LateralViewForwardDesc.class, LateralViewForwardOperator.class)); - opvec.add(new OpTuple(JDBMDummyDesc.class, - JDBMDummyOperator.class)); - opvec.add(new OpTuple(JDBMSinkDesc.class, - JDBMSinkOperator.class)); + opvec.add(new OpTuple(HashTableDummyDesc.class, + HashTableDummyOperator.class)); + opvec.add(new OpTuple(HashTableSinkDesc.class, + HashTableSinkOperator.class)); } public static Operator get(Class opClass) { Index: ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (working copy) @@ -164,7 +164,7 @@ // reset rowcontainer's serde, objectinspector, and tableDesc. for (int i = 0; i < numAliases; i++) { Byte alias = conf.getTagOrder()[i]; - RowContainer> rc = joinOp.storage.get(Byte + RowContainer> rc = (RowContainer>)joinOp.storage.get(Byte .valueOf((byte) i)); if (rc != null) { rc.setSerDe(tblSerializers.get((byte) i), skewKeysTableObjectInspector @@ -178,7 +178,7 @@ if (skewKeyInCurrentGroup) { String specPath = conf.getBigKeysDirMap().get((byte) currBigKeyTag); - RowContainer> bigKey = joinOp.storage.get(Byte + RowContainer> bigKey = (RowContainer>)joinOp.storage.get(Byte .valueOf((byte) currBigKeyTag)); Path outputPath = getOperatorOutputPath(specPath); FileSystem destFs = outputPath.getFileSystem(hconf); @@ -188,7 +188,7 @@ if (((byte) i) == currBigKeyTag) { continue; } - RowContainer> values = joinOp.storage.get(Byte + RowContainer> values = (RowContainer>)joinOp.storage.get(Byte .valueOf((byte) i)); if (values != null) { specPath = conf.getSmallKeysDirMap().get((byte) currBigKeyTag).get( @@ -216,7 +216,7 @@ skewKeyInCurrentGroup = false; for (int i = 0; i < numAliases; i++) { - RowContainer> rc = joinOp.storage.get(Byte + RowContainer> rc = (RowContainer>)joinOp.storage.get(Byte .valueOf((byte) i)); if (rc != null) { rc.setKeyObject(dummyKey); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractMapJoinKey.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractMapJoinKey.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractMapJoinKey.java (revision 0) @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hive.ql.exec.persistence;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+/**
+ * Base class for map join keys: implementations define equality, hashing,
+ * externalizable serialization, and the null check used by the map join
+ * operators.
+ */
+public abstract class AbstractMapJoinKey implements Externalizable {
+
+  protected static int metadataTag = -1;
+
+  public AbstractMapJoinKey() {
+  }
+
+  @Override
+  public abstract boolean equals(Object o);
+
+  @Override
+  public abstract int hashCode();
+
+  public abstract void readExternal(ObjectInput in) throws IOException,
+      ClassNotFoundException;
+
+  public abstract void writeExternal(ObjectOutput out) throws IOException;
+
+  public abstract boolean hasAnyNulls();
+
+}
Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractMapJoinKey.java
___________________________________________________________________
Added: svn:executable
   + *

Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java (revision 0)
@@ -0,0 +1,30 @@
+package org.apache.hadoop.hive.ql.exec.persistence;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+public abstract class AbstractRowContainer<Row> {
+
+  public AbstractRowContainer() {
+
+  }
+
+  public abstract void add(Row t) throws HiveException;
+
+  public abstract Row first() throws HiveException;
+
+  public abstract Row next() throws HiveException;
+
+  /**
+   * Get the number of elements in the RowContainer.
+   *
+   * @return number of elements in the RowContainer
+   */
+  public abstract int size();
+
+  /**
+   * Remove all elements in the RowContainer.
+ */ + + public abstract void clear() throws HiveException ; +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java (working copy) @@ -19,20 +19,22 @@ package org.apache.hadoop.hive.ql.exec.persistence; import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.text.NumberFormat; import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Properties; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.util.jdbm.RecordManager; -import org.apache.hadoop.hive.ql.util.jdbm.RecordManagerFactory; -import org.apache.hadoop.hive.ql.util.jdbm.RecordManagerOptions; -import org.apache.hadoop.hive.ql.util.jdbm.helper.FastIterator; -import org.apache.hadoop.hive.ql.util.jdbm.htree.HTree; +import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; /** * Simple wrapper for persistent Hashmap implementing only the @@ -41,35 +43,27 @@ * table exceeds a certain threshold, new elements will go into the persistent * hash table. */ -public class HashMapWrapper { +public class HashMapWrapper implements Serializable { protected Log LOG = LogFactory.getLog(this.getClass().getName()); // default threshold for using main memory based HashMap - private static final int THRESHOLD = 25000; + private static final int THRESHOLD = 1000000; + private static final float LOADFACTOR = 0.75f; - private int threshold; // threshold to put data into persistent hash table + private double threshold; // threshold to put data into persistent hash table // instead - private HashMap mHash; // main memory HashMap - private HTree pHash; // persistent HashMap - private RecordManager recman; // record manager required by HTree - private File tmpFile; // temp file holding the persistent data from record - // manager. - private MRU MRUList; // MRU cache entry + private HashMap mHash; // main memory HashMap + - /** - * Doubly linked list of value items. Note: this is only used along with - * memory hash table. Persistent hash stores the value directory. - */ - class MRUItem extends DCLLItem { - K key; - V value; + + protected transient LogHelper console; - MRUItem(K k, V v) { - key = k; - value = v; - } - } + private File dumpFile; + public static MemoryMXBean memoryMXBean; + private long maxMemory; + private long currentMemory; + private NumberFormat num ; /** * Constructor. @@ -78,397 +72,191 @@ * User specified threshold to store new values into persistent * storage. 
+ public HashMapWrapper(int threshold,float loadFactor) { + this.threshold = 0.9; // heap usage ratio, not an entry count; see isAbort() + mHash = new HashMap<K, V>(threshold, loadFactor); + console = new LogHelper(LOG); + memoryMXBean = ManagementFactory.getMemoryMXBean(); + maxMemory = memoryMXBean.getHeapMemoryUsage().getMax(); + LOG.info("maximum memory: " + maxMemory); + num = NumberFormat.getInstance(); + num.setMinimumFractionDigits(2); + } + public HashMapWrapper(int threshold) { - this.threshold = threshold; - this.pHash = null; - this.recman = null; - this.tmpFile = null; - mHash = new HashMap(); - MRUList = new MRU(); + this(threshold, LOADFACTOR); } public HashMapWrapper() { - this(THRESHOLD); + this(THRESHOLD, LOADFACTOR); } - /** - * Get the value based on the key. this GET method will directly - * return the value from jdbm storage. - * @param key - * @return Value corresponding to the key. If the key is not found, return - * null. - */ -/* - public V getMapJoinValueObject(K key) throws HiveException{ - if(pHash == null) { - LOG.warn("the jdbm object is not ready!"); - throw new HiveException(); - } - try{ - V value = (V)pHash.get(key); - return value; - }catch(Exception e){ - throw new HiveException(e); - } - }*/ - /* - * In this get operation, the jdbm should read only - */ - public V getMapJoinValueObject(K key) throws HiveException { - V value = null; + public V get(K key) { + return mHash.get(key); + } - // if not the MRU, searching the main memory hash table. - MRUItem item = mHash.get(key); - if (item != null) { - value = item.value; - MRUList.moveToHead(item); - } else if (pHash != null) { - try { - value = (V) pHash.get(key); - if (value != null) { - if (mHash.size() < threshold) { - MRUItem itm= new MRUItem(key, value); - mHash.put(key, itm); - //pHash.remove(key); - MRUList.put(itm); - //recman.commit(); - } else if (threshold > 0) { // flush the LRU to disk - MRUItem tail = MRUList.tail(); // least recently used item - //pHash.put(tail.key, tail.value); - //pHash.remove(key); - //recman.commit(); + public boolean put(K key, V value) throws HiveException { + //isAbort(); + mHash.put(key, value); + return false; + } - // update mHash -- reuse MRUItem - item = mHash.remove(tail.key); - item.key = key; - item.value = value; - mHash.put(key, item); + public void remove(K key) { + mHash.remove(key); + } - // update MRU -- reusing MRUItem - tail.key = key; - tail.value = value; - MRUList.moveToHead(tail); - } - } - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } + private void checkDumpFileSize(){ + long fileSize = dumpFile.length(); + if(fileSize > 1000000){ + console.printInfo("Hash table dump file size (bytes): "+ fileSize); } - return value; } - public V get(K key) throws HiveException { - V value = null; - // if not the MRU, searching the main memory hash table.
- MRUItem item = mHash.get(key); - if (item != null) { - value = item.value; - MRUList.moveToHead(item); - } else if (pHash != null) { - try { - value = (V) pHash.get(key); - if (value != null) { - if (mHash.size() < threshold) { - mHash.put(key, new MRUItem(key, value)); - pHash.remove(key); - } else if (threshold > 0) { // flush the LRU to disk - MRUItem tail = MRUList.tail(); // least recently used item - pHash.put(tail.key, tail.value); - pHash.remove(key); - recman.commit(); + /** + * Flush the main memory hash table into the persistent cache file. + * + * @return the size in bytes of the file that was written + */ + public long flushMemoryCacheToPersistent(File file) throws IOException{ + ObjectOutputStream outputStream = new ObjectOutputStream(new FileOutputStream(file)); + outputStream.writeObject(mHash); + outputStream.flush(); + outputStream.close(); - // update mHash -- reuse MRUItem - item = mHash.remove(tail.key); - item.key = key; - item.value = value; - mHash.put(key, item); + return file.length(); + } - // update MRU -- reusing MRUItem - tail.key = key; - tail.value = value; - MRUList.moveToHead(tail); - } - } - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } - } - return value; + public void initilizePersistentHash(String fileName) throws IOException, ClassNotFoundException { + ObjectInputStream inputStream = new ObjectInputStream(new FileInputStream(fileName)); + HashMap<K, V> hashtable = (HashMap<K, V>) inputStream.readObject(); + this.setMHash(hashtable); + + inputStream.close(); + } + + public int size(){ + return mHash.size(); + } + + public Set<K> keySet(){ + return mHash.keySet(); + }
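The pair of methods above is a plain java.io object-stream round trip: flushMemoryCacheToPersistent serializes the whole in-memory map, and initilizePersistentHash reads it back before the join probes it. A minimal sketch of the same flow; the temp-file name is an assumption for illustration, not something the patch prescribes:

    import java.io.*;
    import java.util.HashMap;

    public class DumpSketch {
      public static void main(String[] args) throws Exception {
        HashMap<String, String> table = new HashMap<String, String>();
        table.put("key1", "row1");

        File dump = File.createTempFile("smalltable", ".hashtable");
        // write side, as in flushMemoryCacheToPersistent
        ObjectOutputStream out =
            new ObjectOutputStream(new FileOutputStream(dump));
        out.writeObject(table);
        out.close();

        // read side, as in initilizePersistentHash
        ObjectInputStream in =
            new ObjectInputStream(new FileInputStream(dump));
        @SuppressWarnings("unchecked")
        HashMap<String, String> loaded =
            (HashMap<String, String>) in.readObject();
        in.close();
        System.out.println(loaded.get("key1")); // row1
      }
    }

Because the map's keys and values implement Externalizable, writeObject defers to their writeExternal/readExternal hooks, which is how the SerDe-based encoding further down in this patch takes effect.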
+ /** - * Put the key value pair in the hash table. It will first try to put it into - * the main memory hash table. If the size exceeds the threshold, it will put - * it into the persistent hash table. + * Close the hash table and clean it up. * - * @param key - * @param value * @throws HiveException */ - public void put(K key, V value) throws HiveException { - int mm_size = mHash.size(); - MRUItem itm = mHash.get(key); + public void close() throws HiveException { + isAbort(); + mHash.clear(); + } + + public void clear() throws HiveException { + mHash.clear(); + } + + public int getKeySize(){ + return mHash.size(); + } + + private boolean isAbort(){ + int size = mHash.size(); + long usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed(); + double rate = (double)usedMemory/(double)maxMemory; + console.printInfo("Hashtable size:\t"+size+"\tMemory usage:\t"+usedMemory+"\t rate:\t"+num.format(rate)); + System.gc(); - if (mm_size < threshold) { - if (itm != null) { - // re-use the MRU item -- just overwrite value, key is the same - itm.value = value; - MRUList.moveToHead(itm); - if (!mHash.get(key).value.equals(value)) { - LOG.error("HashMapWrapper.put() reuse MRUItem inconsistency [1]."); - } - assert (mHash.get(key).value.equals(value)); - } else { - // check if key already exists in pHash - try { - if (pHash != null && pHash.get(key) != null) { - // remove the old item from pHash and insert the new one - pHash.remove(key); - pHash.put(key, value); - recman.commit(); - return; - } - } catch (Exception e) { - e.printStackTrace(); - throw new HiveException(e); - } - itm = new MRUItem(key, value); - MRUList.put(itm); - mHash.put(key, itm); - } - } else { - if (itm != null) { // replace existing item - // re-use the MRU item -- just overwrite value, key is the same - itm.value = value; - MRUList.moveToHead(itm); - if (!mHash.get(key).value.equals(value)) { - LOG.error("HashMapWrapper.put() reuse MRUItem inconsistency [2]."); - } - assert (mHash.get(key).value.equals(value)); - } else { - // for items inserted into persistent hash table, we don't put it into - // MRU - if (pHash == null) { - pHash = getPersistentHash(); - } - try { - pHash.put(key, value); - recman.commit(); - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } - } + + if(rate > threshold){ + return true; // heap usage has crossed the configured ratio + } + + return false; + + } + public Log getLOG() { + return LOG; + } + + public void setLOG(Log log) { + LOG = log; } - public void putToJDBM(K key, V value) throws HiveException{ - if (pHash == null) { - pHash = getPersistentHash(); - } - try { - pHash.put(key, value); - recman.commit(); - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } + public double getThreshold() { + return threshold; + } + public void setThreshold(double threshold) { + this.threshold = threshold; } - /** - * Flush the main memory hash table into the persistent cache file - * - * @return persistent cache file - */ - public String flushMemoryCacheToPersistent() throws HiveException{ - try{ - //if no persistent cache file; create a new one - if(pHash == null){ - pHash = getPersistentHash(); - } - int mm_size = mHash.size(); - //no data in the memory cache - if(mm_size == 0){ - return tmpFile.getAbsolutePath(); - } - //iterate the memory hash table and put them into persistent file - for (Map.Entry entry : mHash.entrySet()) { - K key = entry.getKey(); - MRUItem item = entry.getValue(); - pHash.put(key, item.value); - } - //commit to the persistent file - recman.commit(); + public HashMap<K, V> getMHash() { + return mHash; - //release the memory - mHash.clear(); + } + public void setMHash(HashMap<K, V> hash) {
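// Editorial sketch of the heap check that isAbort() performs above; the
// 0.9 literal mirrors the threshold field set in the constructor, and the
// explicit System.gc() is presumably there so the next reading is not
// inflated by collectable garbage:
//
//   MemoryMXBean mx = ManagementFactory.getMemoryMXBean();
//   long used = mx.getHeapMemoryUsage().getUsed();
//   long max = mx.getHeapMemoryUsage().getMax();
//   double rate = (double) used / (double) max;
//   if (rate > 0.9) {
//     // stop loading rows and dump the table to disk
//   }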
+ mHash = hash; + } - }catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } - return tmpFile.getAbsolutePath(); + public LogHelper getConsole() { + return console; } - public void initilizePersistentHash(File jdbmfile) throws HiveException{ - try{ - Properties props = new Properties(); - props.setProperty(RecordManagerOptions.CACHE_TYPE, - RecordManagerOptions.NORMAL_CACHE); - props.setProperty(RecordManagerOptions.DISABLE_TRANSACTIONS, "true"); + public void setConsole(LogHelper console) { + this.console = console; + } - recman = RecordManagerFactory.createRecordManager(jdbmfile, props); - long recid = recman.getNamedObject( "hashtable" ); - if ( recid != 0 ) { - pHash = HTree.load( recman, recid ); - }else{ - LOG.warn("initiliaze the hash table by jdbm file Error!"); - throw new HiveException(); - } + public File getDumpFile() { + return dumpFile; + } - } catch (Exception e) { - e.printStackTrace(); - LOG.warn(e.toString()); - throw new HiveException(e); - } + public void setDumpFile(File dumpFile) { + this.dumpFile = dumpFile; } - /** - * Get the persistent hash table. - * - * @return persistent hash table - * @throws HiveException - */ - private HTree getPersistentHash() throws HiveException { - try { - // Create a temporary file for the page manager to hold persistent data. - if (tmpFile != null) { - tmpFile.delete(); - } - tmpFile = File.createTempFile("HashMapWrapper", ".tmp", new File("/tmp")); - LOG.info("HashMapWrapper created temp file " + tmpFile.getAbsolutePath()); - // Delete the temp file if the JVM terminate normally through Hadoop job - // kill command. - // Caveat: it won't be deleted if JVM is killed by 'kill -9'. - tmpFile.deleteOnExit(); + public static MemoryMXBean getMemoryMXBean() { + return memoryMXBean; + } - Properties props = new Properties(); - props.setProperty(RecordManagerOptions.CACHE_TYPE, - RecordManagerOptions.NO_CACHE); - props.setProperty(RecordManagerOptions.DISABLE_TRANSACTIONS, "true"); + public static void setMemoryMXBean(MemoryMXBean memoryMXBean) { + HashMapWrapper.memoryMXBean = memoryMXBean; + } - recman = RecordManagerFactory.createRecordManager(tmpFile, props); - pHash = HTree.createInstance(recman); - recman.setNamedObject( "hashtable", pHash.getRecid() ); - //commit to the persistent file - recman.commit(); - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } - return pHash; + public long getMaxMemory() { + return maxMemory; } - /** - * Clean up the hash table. All elements in the main memory hash table will be - * removed, and the persistent hash table will be destroyed (temporary file - * will be deleted). - */ - public void clear() throws HiveException { - if (mHash != null) { - mHash.clear(); - MRUList.clear(); - } - close(); + public void setMaxMemory(long maxMemory) { + this.maxMemory = maxMemory; } - /** - * Remove one key-value pairs from the hash table based on the given key. If - * the pairs are removed from the main memory hash table, pairs in the - * persistent hash table will not be moved to the main memory hash table. - * Future inserted elements will go into the main memory hash table though. 
- * - * @param key - * @throws HiveException - */ - public void remove(Object key) throws HiveException { - MRUItem entry = mHash.remove(key); - if (entry != null) { - MRUList.remove(entry); - } else if (pHash != null) { - try { - pHash.remove(key); - } catch (Exception e) { - LOG.warn(e.toString()); - throw new HiveException(e); - } - } + public long getCurrentMemory() { + return currentMemory; } - /** - * Get a list of all keys in the hash map. - * - * @return - */ - public Set keySet() { - HashSet ret = null; - if (mHash != null) { - ret = new HashSet(); - ret.addAll(mHash.keySet()); - } - if (pHash != null) { - try { - FastIterator fitr = pHash.keys(); - if (fitr != null) { - K k; - while ((k = (K) fitr.next()) != null) { - ret.add(k); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - } - return ret; + public void setCurrentMemory(long currentMemory) { + this.currentMemory = currentMemory; } - /** - * Get the main memory cache capacity. - * - * @return the maximum number of items can be put into main memory HashMap - * cache. - */ - public int cacheSize() { - return threshold; + public NumberFormat getNum() { + return num; } - /** - * Close the persistent hash table and clean it up. - * - * @throws HiveException - */ - public void close() throws HiveException { + public void setNum(NumberFormat num) { + this.num = num; + } - if (pHash != null) { - try { - if (recman != null) { - recman.close(); - } - } catch (Exception e) { - throw new HiveException(e); - } - // delete the temporary file - if(tmpFile != null){ - tmpFile.delete(); - tmpFile = null; - } - pHash = null; - recman = null; - } + public static int getTHRESHOLD() { + return THRESHOLD; } + } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinDoubleKeys.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinDoubleKeys.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinDoubleKeys.java (revision 0) @@ -0,0 +1,168 @@ +package org.apache.hadoop.hive.ql.exec.persistence; + +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.util.ArrayList; + +import org.apache.hadoop.hive.ql.exec.MapJoinMetaData; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.io.Writable; + +public class MapJoinDoubleKeys extends AbstractMapJoinKey { + + protected transient Object obj1; + protected transient Object obj2; + + + public MapJoinDoubleKeys() { + } + + /** + * @param obj1 the first key object + * @param obj2 the second key object + */ + public MapJoinDoubleKeys(Object obj1, Object obj2) { + this.obj1 = obj1; + this.obj2 = obj2; + } + + @Override + public boolean equals(Object o) { + if (o instanceof MapJoinDoubleKeys) { + MapJoinDoubleKeys mObj = (MapJoinDoubleKeys) o; + Object key1 = mObj.getObj1(); + Object key2 = mObj.getObj2(); + // compare each component null-safely; two nulls count as equal + boolean firstEqual = (obj1 == null) ? (key1 == null) : obj1.equals(key1); + boolean secondEqual = (obj2 == null) ? (key2 == null) : obj2.equals(key2); + return firstEqual && secondEqual; + } + return false; + } + + @Override + public int hashCode() { + int hashCode=1; + if(obj1 == null){ + hashCode
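// Editorial note: these key objects are used as java.util.HashMap keys,
// so equals() and hashCode() must agree -- keys equal under equals() have
// to produce identical hash codes. A quick illustrative check:
//
//   MapJoinDoubleKeys a = new MapJoinDoubleKeys("x", null);
//   MapJoinDoubleKeys b = new MapJoinDoubleKeys("x", null);
//   assert a.equals(b) && a.hashCode() == b.hashCode();
//
// The hashCode below folds both components in deterministically, so the
// contract holds; unequal keys may still collide, which HashMap tolerates.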
=metadataTag; + }else{ + hashCode += (31 + obj1.hashCode()); + } + if(obj2 == null){ + hashCode +=metadataTag; + }else{ + hashCode += (31 + obj2.hashCode()); + } + return hashCode; + } + + @Override + public void readExternal(ObjectInput in) throws IOException, + ClassNotFoundException { + try { + // get the tableDesc from the map stored in the mapjoin operator + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( + Integer.valueOf(metadataTag)); + + Writable val = ctx.getSerDe().getSerializedClass().newInstance(); + val.readFields(in); + + + + ArrayList list = (ArrayList) ObjectInspectorUtils.copyToStandardObject(ctx + .getSerDe().deserialize(val), ctx.getSerDe().getObjectInspector(),ObjectInspectorCopyOption.WRITABLE); + + if(list == null){ + obj1 = null; + obj2= null; + + }else{ + obj1 = list.get(0); + obj2 = list.get(1); + } + + } catch (Exception e) { + throw new IOException(e); + } + + } + + @Override + public void writeExternal(ObjectOutput out) throws IOException { + try { + //out.writeInt(metadataTag); + // get the tableDesc from the map stored in the mapjoin operator + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( + Integer.valueOf(metadataTag)); + + ArrayList list = MapJoinMetaData.getList(); + list.add(obj1); + list.add(obj2); + // Different processing for key and value + Writable outVal = ctx.getSerDe().serialize(list, ctx.getStandardOI()); + outVal.write(out); + + } catch (SerDeException e) { + throw new IOException(e); + } + } + + + + /** + * @return the obj + */ + public Object getObj1() { + return obj1; + } + + /** + * @param obj + * the obj to set + */ + public void setObj1(Object obj1) { + this.obj1 = obj1; + } + + /** + * @return the obj + */ + public Object getObj2() { + return obj2; + } + + /** + * @param obj + * the obj to set + */ + public void setObj2(Object obj2) { + this.obj2 = obj2; + } + + + @Override + public boolean hasAnyNulls(){ + if(obj1 == null) { + return true; + } + if(obj2 == null) { + return true; + } + return false; + } +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinDoubleKeys.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectKey.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectKey.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectKey.java (working copy) @@ -18,14 +18,13 @@ package org.apache.hadoop.hive.ql.exec.persistence; -import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.ArrayList; import org.apache.hadoop.hive.ql.exec.MapJoinMetaData; -import org.apache.hadoop.hive.ql.exec.JDBMSinkOperator.JDBMSinkObjectCtx; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; @@ -34,10 +33,10 @@ /** * Map Join Object used for both key. 
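All three key classes use Externalizable rather than default Java serialization: ObjectOutputStream calls their writeExternal/readExternal hooks, so each key can delegate its wire format to the SerDe registered in MapJoinMetaData. A minimal standalone sketch of the mechanism; PointKey is an invented stand-in, not part of the patch:

    import java.io.*;

    class PointKey implements Externalizable {
      int x;
      public PointKey() { }                  // no-arg constructor is required
      public PointKey(int x) { this.x = x; }
      public void writeExternal(ObjectOutput out) throws IOException {
        out.writeInt(x);                     // the class owns its wire format
      }
      public void readExternal(ObjectInput in) throws IOException,
          ClassNotFoundException {
        x = in.readInt();
      }
    }

Deserialization constructs the object through the no-arg constructor and then calls readExternal, which is why every key class in this patch declares an empty constructor.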
*/ -public class MapJoinObjectKey implements Externalizable { +public class MapJoinObjectKey extends AbstractMapJoinKey { + - protected transient int metadataTag; - protected transient ArrayList obj; + protected transient Object[] obj; public MapJoinObjectKey() { } @@ -46,8 +45,7 @@ * @param metadataTag * @param obj */ - public MapJoinObjectKey(int metadataTag, ArrayList obj) { - this.metadataTag = metadataTag; + public MapJoinObjectKey(Object[] obj) { this.obj = obj; } @@ -55,43 +53,60 @@ public boolean equals(Object o) { if (o instanceof MapJoinObjectKey) { MapJoinObjectKey mObj = (MapJoinObjectKey) o; - if (mObj.getMetadataTag() == metadataTag) { - if ((obj == null) && (mObj.getObj() == null)) { - return true; - } - if ((obj != null) && (mObj.getObj() != null) - && (mObj.getObj().equals(obj))) { + Object[] mObjArray = mObj.getObj(); + if ((obj == null) && (mObjArray == null)) { + return true; + } + if ((obj != null) && (mObjArray != null)){ + if(obj.length == mObjArray.length){ + for(int i = 0; i) ObjectInspectorUtils.copyToStandardObject(ctx + ArrayList list = (ArrayList) ObjectInspectorUtils.copyToStandardObject(ctx .getSerDe().deserialize(val), ctx.getSerDe().getObjectInspector(), ObjectInspectorCopyOption.WRITABLE); - if(obj == null){ - obj = new ArrayList(0); + if(list == null){ + obj = new ArrayList(0).toArray(); + }else{ + obj = list.toArray(); } + } catch (Exception e) { throw new IOException(e); } @@ -101,9 +116,9 @@ @Override public void writeExternal(ObjectOutput out) throws IOException { try { - out.writeInt(metadataTag); + //out.writeInt(metadataTag); // get the tableDesc from the map stored in the mapjoin operator - JDBMSinkObjectCtx ctx = MapJoinMetaData.get( + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( Integer.valueOf(metadataTag)); // Different processing for key and value @@ -114,25 +129,12 @@ } } - /** - * @return the metadataTag - */ - public int getMetadataTag() { - return metadataTag; - } - /** - * @param metadataTag - * the metadataTag to set - */ - public void setMetadataTag(int metadataTag) { - this.metadataTag = metadataTag; - } /** * @return the obj */ - public ArrayList getObj() { + public Object[] getObj() { return obj; } @@ -140,8 +142,21 @@ * @param obj * the obj to set */ - public void setObj(ArrayList obj) { + public void setObj(Object[] obj) { this.obj = obj; } + @Override + public boolean hasAnyNulls(){ + if (obj != null && obj.length> 0) { + for (Object k : obj) { + if (k == null) { + return true; + } + } + } + return false; + + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectValue.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectValue.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectValue.java (working copy) @@ -24,12 +24,8 @@ import java.io.ObjectOutput; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinMetaData; -import org.apache.hadoop.hive.ql.exec.JDBMSinkOperator.JDBMSinkObjectCtx; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; @@ -41,20 +37,19 @@ 
public class MapJoinObjectValue implements Externalizable { protected transient int metadataTag; - protected transient RowContainer obj; - protected transient Configuration conf; - protected int bucketSize; // bucket size for RowContainer - protected Log LOG = LogFactory.getLog(this.getClass().getName()); + protected transient MapJoinRowContainer obj; + + public MapJoinObjectValue() { - bucketSize = 100; // default bucket size + } /** * @param metadataTag * @param obj */ - public MapJoinObjectValue(int metadataTag, RowContainer obj) { + public MapJoinObjectValue(int metadataTag, MapJoinRowContainer obj) { this.metadataTag = metadataTag; this.obj = obj; } @@ -63,6 +58,7 @@ public boolean equals(Object o) { if (o instanceof MapJoinObjectValue) { MapJoinObjectValue mObj = (MapJoinObjectValue) o; + if (mObj.getMetadataTag() == metadataTag) { if ((obj == null) && (mObj.getObj() == null)) { return true; @@ -89,13 +85,11 @@ metadataTag = in.readInt(); // get the tableDesc from the map stored in the mapjoin operator - JDBMSinkObjectCtx ctx = MapJoinMetaData.get( + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( Integer.valueOf(metadataTag)); int sz = in.readInt(); - RowContainer res = new RowContainer(bucketSize, ctx.getConf()); - res.setSerDe(ctx.getSerDe(), ctx.getStandardOI()); - res.setTableDesc(ctx.getTblDesc()); + MapJoinRowContainer res = new MapJoinRowContainer(); if (sz > 0) { int numCols = in.readInt(); if (numCols > 0) { @@ -108,12 +102,11 @@ .getSerDe().getObjectInspector(), ObjectInspectorCopyOption.WRITABLE); - res.add(memObj); + res.add(memObj.toArray()); } - } - else{ + }else{ for(int i = 0 ; i (0)); + res.add(new ArrayList(0).toArray()); } } } @@ -130,17 +123,17 @@ out.writeInt(metadataTag); // get the tableDesc from the map stored in the mapjoin operator - JDBMSinkObjectCtx ctx = MapJoinMetaData.get( + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( Integer.valueOf(metadataTag)); // Different processing for key and value - RowContainer> v = obj; + MapJoinRowContainer v = obj; out.writeInt(v.size()); if (v.size() > 0) { - ArrayList row = v.first(); - out.writeInt(row.size()); + Object[] row = v.first(); + out.writeInt(row.length); - if (row.size() > 0) { + if (row.length > 0) { for (; row != null; row = v.next()) { Writable outVal = ctx.getSerDe().serialize(row, ctx.getStandardOI()); outVal.write(out); @@ -172,7 +165,7 @@ /** * @return the obj */ - public RowContainer getObj() { + public MapJoinRowContainer getObj() { return obj; } @@ -180,13 +173,8 @@ * @param obj * the obj to set */ - public void setObj(RowContainer obj) { + public void setObj(MapJoinRowContainer obj) { this.obj = obj; } - public void setConf(Configuration conf) { - this.conf = conf; - bucketSize = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVEMAPJOINBUCKETCACHESIZE); - } - } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinRowContainer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinRowContainer.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinRowContainer.java (revision 0) @@ -0,0 +1,82 @@ +package org.apache.hadoop.hive.ql.exec.persistence; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hive.ql.metadata.HiveException; +public class MapJoinRowContainer + extends AbstractRowContainer { + + private List list; + + private int index; + + public MapJoinRowContainer(){ + index = 0; + list = new ArrayList(1); + } + + @Override + 
public void add(Row t) throws HiveException { + list.add(t); + } + + + @Override + public Row first() throws HiveException { + index = 0; + if(index < list.size()){ + return list.get(index); + } + return null; + } + + @Override + public Row next() throws HiveException { + index++; + if(index < list.size()){ + return list.get(index); + } + return null; + + } + + /** + * Get the number of elements in the RowContainer. + * + * @return number of elements in the RowContainer + */ + @Override + public int size() { + return list.size(); + } + + /** + * Remove all elements in the RowContainer. + */ + @Override + public void clear() throws HiveException { + list.clear(); + index = 0; + } + + public List getList() { + return list; + } + + public void setList(List list) { + this.list = list; + } + + public void reset(MapJoinRowContainer other) throws HiveException{ + list.clear(); + Object[] obj; + for(obj = other.first(); obj!= null;obj=other.next()){ + ArrayList ele = new ArrayList(obj.length); + for(int i =0; i < obj.length;i++){ + ele.add(obj[i]); + } + list.add((Row)ele); + } + } +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinRowContainer.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinSingleKey.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinSingleKey.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinSingleKey.java (revision 0) @@ -0,0 +1,134 @@ +package org.apache.hadoop.hive.ql.exec.persistence; + +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.util.ArrayList; + +import org.apache.hadoop.hive.ql.exec.MapJoinMetaData; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.HashTableSinkObjectCtx; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.io.Writable; + +public class MapJoinSingleKey extends AbstractMapJoinKey{ + + + protected transient Object obj; + + public MapJoinSingleKey() { + } + + /** + * @param metadataTag + * @param obj + */ + public MapJoinSingleKey(Object obj) { + this.obj = obj; + } + + @Override + public boolean equals(Object o) { + if (o instanceof MapJoinSingleKey) { + MapJoinSingleKey mObj = (MapJoinSingleKey) o; + Object key = mObj.getObj(); + if ((obj == null) && (key == null)) { + return true; + } + if ((obj != null) && (key != null)){ + if(obj.equals(key)){ + return true; + } + } + } + return false; + } + + @Override + public int hashCode() { + int hashCode; + if(obj == null){ + hashCode =metadataTag; + }else{ + hashCode = 31 + obj.hashCode(); + } + return hashCode; + } + + @Override + public void readExternal(ObjectInput in) throws IOException, + ClassNotFoundException { + try { + // get the tableDesc from the map stored in the mapjoin operator + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( + Integer.valueOf(metadataTag)); + + Writable val = ctx.getSerDe().getSerializedClass().newInstance(); + val.readFields(in); + + + + ArrayList list = (ArrayList) ObjectInspectorUtils.copyToStandardObject(ctx + .getSerDe().deserialize(val), ctx.getSerDe().getObjectInspector(),ObjectInspectorCopyOption.WRITABLE); + + if(list == null){ + 
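// Editorial sketch of how a MapJoinRowContainer is consumed on the probe
// side of the join (names illustrative; values are Object[] rows as
// elsewhere in this patch):
//
//   MapJoinRowContainer<Object[]> rows = hashTable.get(key);
//   if (rows != null) {
//     for (Object[] row = rows.first(); row != null; row = rows.next()) {
//       // forward the joined output row
//     }
//   }
//
// first() rewinds the single internal cursor, so the container supports
// repeated sequential scans but not interleaved readers.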
obj = null; + }else{ + obj = list.get(0); + } + + } catch (Exception e) { + throw new IOException(e); + } + + } + + @Override + public void writeExternal(ObjectOutput out) throws IOException { + try { + //out.writeInt(metadataTag); + // get the tableDesc from the map stored in the mapjoin operator + HashTableSinkObjectCtx ctx = MapJoinMetaData.get( + Integer.valueOf(metadataTag)); + + ArrayList list = MapJoinMetaData.getList(); + list.add(obj); + + // Different processing for key and value + Writable outVal = ctx.getSerDe().serialize(list, ctx.getStandardOI()); + outVal.write(out); + + } catch (SerDeException e) { + throw new IOException(e); + } + } + + + + /** + * @return the obj + */ + public Object getObj() { + return obj; + } + + /** + * @param obj + * the obj to set + */ + public void setObj(Object obj) { + this.obj = obj; + } + + @Override + public boolean hasAnyNulls(){ + if(obj == null) { + return true; + } + return false; + } + + + +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinSingleKey.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java (working copy) @@ -75,7 +75,8 @@ * support multiple reader interleaving reading. * */ -public class RowContainer> { +public class RowContainer> + extends AbstractRowContainer{ protected static Log LOG = LogFactory.getLog(RowContainer.class); @@ -120,6 +121,10 @@ JobConf jobCloneUsingLocalFs = null; private LocalFileSystem localFs; + public RowContainer( ) { + + } + public RowContainer(Configuration jc) throws HiveException { this(BLOCKSIZE, jc); } @@ -161,6 +166,7 @@ this.standardOI = oi; } + @Override public void add(Row t) throws HiveException { if (this.tblDesc != null) { if (addCursor >= blockSize) { // spill the current block to tmp file @@ -180,6 +186,7 @@ ++size; } + @Override public Row first() throws HiveException { if (size == 0) { return null; @@ -238,6 +245,7 @@ } + @Override public Row next() throws HiveException { if (!firstCalled) { @@ -366,6 +374,7 @@ * * @return number of elements in the RowContainer */ + @Override public int size() { return size; } @@ -435,6 +444,7 @@ /** * Remove all elements in the RowContainer. 
*/ + @Override public void clear() throws HiveException { itrCursor = 0; addCursor = 0; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/test.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/test.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/test.java (revision 0) @@ -0,0 +1,35 @@ +package org.apache.hadoop.hive.ql.exec.persistence; + +import java.text.SimpleDateFormat; +import java.util.Calendar; + +import org.apache.hadoop.hive.ql.util.TimeUtil; + +public class test { + + /** + * @return the current time as a formatted string + */ + public static String now(){ + Calendar cal = Calendar.getInstance(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + return sdf.format(cal.getTime()); + } + + public static void main(String[] args) { + try{ + System.out.println(TimeUtil.now()); + HashMapWrapper<Integer, Integer> hash = new HashMapWrapper<Integer, Integer>(); + for(int i = 0; i < 100000000; i++){ + hash.put(i, i); + } + System.out.println(TimeUtil.now()); + }catch(Exception e){ + e.printStackTrace(); + } + } + + +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/test.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (working copy) @@ -23,8 +23,8 @@ import java.util.List; import java.util.Stack; -import org.apache.hadoop.hive.ql.exec.JDBMDummyOperator; -import org.apache.hadoop.hive.ql.exec.JDBMSinkOperator; +import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; @@ -35,8 +35,8 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.optimizer.physical.MapJoinResolver.LocalMapJoinProcCtx; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.JDBMDummyDesc; -import org.apache.hadoop.hive.ql.plan.JDBMSinkDesc; +import org.apache.hadoop.hive.ql.plan.HashTableDummyDesc; +import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -79,8 +79,8 @@ MapJoinOperator mapJoinOp = (MapJoinOperator) nd; //create an new operator: JDBMSinkOperator - JDBMSinkDesc jdbmSinkDesc = new JDBMSinkDesc(mapJoinOp.getConf()); - JDBMSinkOperator jdbmSinkOp =(JDBMSinkOperator)OperatorFactory.get(jdbmSinkDesc); + HashTableSinkDesc jdbmSinkDesc = new HashTableSinkDesc(mapJoinOp.getConf()); + HashTableSinkOperator jdbmSinkOp =(HashTableSinkOperator)OperatorFactory.get(jdbmSinkDesc); //get the last operator for processing big tables @@ -109,8 +109,8 @@ smallTablesParentOp.add(parent); //create an new operator: JDBMDummyOpeator, which share the table desc - JDBMDummyDesc desc = new JDBMDummyDesc(); - JDBMDummyOperator dummyOp =(JDBMDummyOperator)OperatorFactory.get(desc); + HashTableDummyDesc desc = new HashTableDummyDesc(); + HashTableDummyOperator dummyOp
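// Editorial note, hedged: the rewrite sketched by this factory routes each
// small-table branch of the local plan into a HashTableSinkOperator, which
// builds the in-memory hash table and dumps it to a file, while a
// HashTableDummyOperator is substituted into the map-side plan so the
// MapJoinOperator can load that dump at runtime:
//
//   small-table parent --> HashTableSinkOperator  (build + dump)
//   HashTableDummyOperator --> MapJoinOperator    (load dump, probe)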
=(HashTableDummyOperator)OperatorFactory.get(desc); TableDesc tbl; if(parent.getSchema()==null){ Index: ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java (working copy) @@ -23,8 +23,8 @@ * JDBM Dummy Descriptor implementation. * */ -@Explain(displayName = "JDBMDummy Operator") -public class JDBMDummyDesc implements Serializable { +@Explain(displayName = "Hash Table Dummy Operator") +public class HashTableDummyDesc implements Serializable { private TableDesc tbl; public TableDesc getTbl() { Property changes on: ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java ___________________________________________________________________ Added: svn:mergeinfo Index: ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java (working copy) @@ -32,8 +32,8 @@ * Map Join operator Descriptor implementation. * */ -@Explain(displayName = "JDBM Sink Operator") -public class JDBMSinkDesc extends JoinDesc implements Serializable { +@Explain(displayName = "Hash Table Sink Operator") +public class HashTableSinkDesc extends JoinDesc implements Serializable { private static final long serialVersionUID = 1L; @@ -78,11 +78,11 @@ private LinkedHashMap>> aliasBucketFileNameMapping; private LinkedHashMap bucketFileNameMapping; - public JDBMSinkDesc() { + public HashTableSinkDesc() { bucketFileNameMapping = new LinkedHashMap(); } - public JDBMSinkDesc(MapJoinDesc clone) { + public HashTableSinkDesc(MapJoinDesc clone) { this.bigKeysDirMap = clone.getBigKeysDirMap(); this.conds = clone.getConds(); this.exprs= clone.getExprs(); Property changes on: ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java ___________________________________________________________________ Added: svn:mergeinfo Index: ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMDummyDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMDummyDesc.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMDummyDesc.java (working copy) @@ -1,38 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -/** - * JDBM Dummy Descriptor implementation. 
- * - */ -@Explain(displayName = "JDBMDummy Operator") -public class JDBMDummyDesc implements Serializable { - private TableDesc tbl; - - public TableDesc getTbl() { - return tbl; - } - - public void setTbl(TableDesc tbl) { - this.tbl = tbl; - } - -} Index: ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMSinkDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMSinkDesc.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/JDBMSinkDesc.java (working copy) @@ -1,355 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.Map.Entry; - -/** - * Map Join operator Descriptor implementation. - * - */ -@Explain(displayName = "JDBM Sink Operator") -public class JDBMSinkDesc extends JoinDesc implements Serializable { - private static final long serialVersionUID = 1L; - - - // used to handle skew join - private boolean handleSkewJoin = false; - private int skewKeyDefinition = -1; - private Map bigKeysDirMap; - private Map> smallKeysDirMap; - private Map skewKeysValuesTables; - - // alias to key mapping - private Map> exprs; - - // alias to filter mapping - private Map> filters; - - // used for create joinOutputObjectInspector - protected List outputColumnNames; - - // key:column output name, value:tag - private transient Map reversedExprs; - - // No outer join involved - protected boolean noOuterJoin; - - protected JoinCondDesc[] conds; - - protected Byte[] tagOrder; - private TableDesc keyTableDesc; - - - private Map> keys; - private TableDesc keyTblDesc; - private List valueTblDescs; - - private int posBigTable; - - private Map> retainList; - - private transient String bigTableAlias; - - private LinkedHashMap>> aliasBucketFileNameMapping; - private LinkedHashMap bucketFileNameMapping; - - public JDBMSinkDesc() { - bucketFileNameMapping = new LinkedHashMap(); - } - - public JDBMSinkDesc(MapJoinDesc clone) { - this.bigKeysDirMap = clone.getBigKeysDirMap(); - this.conds = clone.getConds(); - this.exprs= clone.getExprs(); - this.handleSkewJoin = clone.getHandleSkewJoin(); - this.keyTableDesc = clone.getKeyTableDesc(); - this.noOuterJoin = clone.getNoOuterJoin(); - this.outputColumnNames = clone.getOutputColumnNames(); - this.reversedExprs = clone.getReversedExprs(); - this.skewKeyDefinition = clone.getSkewKeyDefinition(); - this.skewKeysValuesTables = clone.getSkewKeysValuesTables(); - this.smallKeysDirMap = clone.getSmallKeysDirMap(); - this.tagOrder = clone.getTagOrder(); - 
this.filters = clone.getFilters(); - - this.keys = clone.getKeys(); - this.keyTblDesc = clone.getKeyTblDesc(); - this.valueTblDescs = clone.getValueTblDescs(); - this.posBigTable = clone.getPosBigTable(); - this.retainList = clone.getRetainList(); - this.bigTableAlias = clone.getBigTableAlias(); - this.aliasBucketFileNameMapping = clone.getAliasBucketFileNameMapping(); - this.bucketFileNameMapping = clone.getBucketFileNameMapping(); - } - - - private void initRetainExprList() { - retainList = new HashMap>(); - Set>> set = exprs.entrySet(); - Iterator>> setIter = set.iterator(); - while (setIter.hasNext()) { - Entry> current = setIter.next(); - List list = new ArrayList(); - for (int i = 0; i < current.getValue().size(); i++) { - list.add(i); - } - retainList.put(current.getKey(), list); - } - } - - public boolean isHandleSkewJoin() { - return handleSkewJoin; - } - - @Override - public void setHandleSkewJoin(boolean handleSkewJoin) { - this.handleSkewJoin = handleSkewJoin; - } - - @Override - public int getSkewKeyDefinition() { - return skewKeyDefinition; - } - - @Override - public void setSkewKeyDefinition(int skewKeyDefinition) { - this.skewKeyDefinition = skewKeyDefinition; - } - - @Override - public Map getBigKeysDirMap() { - return bigKeysDirMap; - } - - @Override - public void setBigKeysDirMap(Map bigKeysDirMap) { - this.bigKeysDirMap = bigKeysDirMap; - } - - @Override - public Map> getSmallKeysDirMap() { - return smallKeysDirMap; - } - - @Override - public void setSmallKeysDirMap(Map> smallKeysDirMap) { - this.smallKeysDirMap = smallKeysDirMap; - } - - @Override - public Map getSkewKeysValuesTables() { - return skewKeysValuesTables; - } - - @Override - public void setSkewKeysValuesTables(Map skewKeysValuesTables) { - this.skewKeysValuesTables = skewKeysValuesTables; - } - - @Override - public Map> getExprs() { - return exprs; - } - - @Override - public void setExprs(Map> exprs) { - this.exprs = exprs; - } - - @Override - public Map> getFilters() { - return filters; - } - - @Override - public void setFilters(Map> filters) { - this.filters = filters; - } - - @Override - public List getOutputColumnNames() { - return outputColumnNames; - } - - @Override - public void setOutputColumnNames(List outputColumnNames) { - this.outputColumnNames = outputColumnNames; - } - - @Override - public Map getReversedExprs() { - return reversedExprs; - } - - @Override - public void setReversedExprs(Map reversedExprs) { - this.reversedExprs = reversedExprs; - } - - @Override - public boolean isNoOuterJoin() { - return noOuterJoin; - } - - @Override - public void setNoOuterJoin(boolean noOuterJoin) { - this.noOuterJoin = noOuterJoin; - } - - @Override - public JoinCondDesc[] getConds() { - return conds; - } - - @Override - public void setConds(JoinCondDesc[] conds) { - this.conds = conds; - } - - @Override - public Byte[] getTagOrder() { - return tagOrder; - } - - @Override - public void setTagOrder(Byte[] tagOrder) { - this.tagOrder = tagOrder; - } - - @Override - public TableDesc getKeyTableDesc() { - return keyTableDesc; - } - - @Override - public void setKeyTableDesc(TableDesc keyTableDesc) { - this.keyTableDesc = keyTableDesc; - } - - - public Map> getRetainList() { - return retainList; - } - - public void setRetainList(Map> retainList) { - this.retainList = retainList; - } - - /** - * @return the keys - */ - @Explain(displayName = "keys") - public Map> getKeys() { - return keys; - } - - /** - * @param keys - * the keys to set - */ - public void setKeys(Map> keys) { - this.keys = keys; - } - - /** - * 
@return the position of the big table not in memory - */ - @Explain(displayName = "Position of Big Table") - public int getPosBigTable() { - return posBigTable; - } - - /** - * @param posBigTable - * the position of the big table not in memory - */ - public void setPosBigTable(int posBigTable) { - this.posBigTable = posBigTable; - } - - /** - * @return the keyTblDesc - */ - public TableDesc getKeyTblDesc() { - return keyTblDesc; - } - - /** - * @param keyTblDesc - * the keyTblDesc to set - */ - public void setKeyTblDesc(TableDesc keyTblDesc) { - this.keyTblDesc = keyTblDesc; - } - - /** - * @return the valueTblDescs - */ - public List getValueTblDescs() { - return valueTblDescs; - } - - /** - * @param valueTblDescs - * the valueTblDescs to set - */ - public void setValueTblDescs(List valueTblDescs) { - this.valueTblDescs = valueTblDescs; - } - - /** - * @return bigTableAlias - */ - public String getBigTableAlias() { - return bigTableAlias; - } - - /** - * @param bigTableAlias - */ - public void setBigTableAlias(String bigTableAlias) { - this.bigTableAlias = bigTableAlias; - } - - public LinkedHashMap>> getAliasBucketFileNameMapping() { - return aliasBucketFileNameMapping; - } - - public void setAliasBucketFileNameMapping( - LinkedHashMap>> aliasBucketFileNameMapping) { - this.aliasBucketFileNameMapping = aliasBucketFileNameMapping; - } - - public LinkedHashMap getBucketFileNameMapping() { - return bucketFileNameMapping; - } - - public void setBucketFileNameMapping(LinkedHashMap bucketFileNameMapping) { - this.bucketFileNameMapping = bucketFileNameMapping; - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/util/JoinUtil.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/util/JoinUtil.java (revision 1029845) +++ ql/src/java/org/apache/hadoop/hive/ql/util/JoinUtil.java (working copy) @@ -28,6 +28,10 @@ import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinDoubleKeys; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectKey; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinSingleKey; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -156,6 +160,78 @@ } /** + * Return the key as a standard object. StandardObject can be inspected by a + * standard ObjectInspector. 
+ */ + public static AbstractMapJoinKey computeMapJoinKeys(Object row, + List<ExprNodeEvaluator> keyFields, List<ObjectInspector> keyFieldsOI) + throws HiveException { + + int size = keyFields.size(); + if (size == 1) { + Object obj = (ObjectInspectorUtils.copyToStandardObject(keyFields.get(0) .evaluate(row), keyFieldsOI.get(0), + ObjectInspectorCopyOption.WRITABLE)); + MapJoinSingleKey key = new MapJoinSingleKey(obj); + return key; + } else if (size == 2) { + Object obj1 = (ObjectInspectorUtils.copyToStandardObject(keyFields.get(0) .evaluate(row), keyFieldsOI.get(0), + ObjectInspectorCopyOption.WRITABLE)); + + Object obj2 = (ObjectInspectorUtils.copyToStandardObject(keyFields.get(1) .evaluate(row), keyFieldsOI.get(1), + ObjectInspectorCopyOption.WRITABLE)); + + MapJoinDoubleKeys key = new MapJoinDoubleKeys(obj1, obj2); + return key; + } else { + // Compute the keys + Object[] nr = new Object[keyFields.size()]; + for (int i = 0; i < keyFields.size(); i++) { + nr[i] = (ObjectInspectorUtils.copyToStandardObject(keyFields.get(i) .evaluate(row), keyFieldsOI.get(i), + ObjectInspectorCopyOption.WRITABLE)); + } + MapJoinObjectKey key = new MapJoinObjectKey(nr); + return key; + } + } + + /** + * Return the value as a standard object. StandardObject can be inspected by a + * standard ObjectInspector. + */ + public static Object[] computeMapJoinValues(Object row, + List<ExprNodeEvaluator> valueFields, List<ObjectInspector> valueFieldsOI, + List<ExprNodeEvaluator> filters, List<ObjectInspector> filtersOI, + boolean noOuterJoin) throws HiveException { + + // Compute the values + Object[] nr; + if (!noOuterJoin) { + nr = new Object[valueFields.size()+1]; + // add whether the row is filtered or not. + nr[valueFields.size()] = new BooleanWritable(isFiltered(row, filters, filtersOI)); + } else { + nr = new Object[valueFields.size()]; + } + + for (int i = 0; i < valueFields.size(); i++) { + nr[i] = ObjectInspectorUtils.copyToStandardObject(valueFields.get(i) .evaluate(row), valueFieldsOI.get(i), + ObjectInspectorCopyOption.WRITABLE); + } + + return nr; + } + + /** * Return the value as a standard object. StandardObject can be inspected by a * standard ObjectInspector.
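The three-way split above is a size specialization: one- and two-column join keys, by far the common cases, avoid allocating and hashing an Object[] per row. A condensed, hypothetical analogue of the dispatch (keyFor is not a method in this patch):

    public static AbstractMapJoinKey keyFor(Object[] cols) {
      if (cols.length == 1) {
        return new MapJoinSingleKey(cols[0]);
      } else if (cols.length == 2) {
        return new MapJoinDoubleKeys(cols[0], cols[1]);
      }
      return new MapJoinObjectKey(cols);   // general case keeps the array
    }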
*/ Index: ql/src/java/org/apache/hadoop/hive/ql/util/PathUtil.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/util/PathUtil.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/util/PathUtil.java (revision 0) @@ -0,0 +1,20 @@ +package org.apache.hadoop.hive.ql.util; + +import org.apache.hadoop.fs.Path; + +public class PathUtil { + public static String suffix=".hashtable"; + public static String generatePath(String baseURI,Byte tag,String bigBucketFileName){ + String path = new String(baseURI+Path.SEPARATOR+"-"+tag+"-"+bigBucketFileName+suffix); + return path; + } + public static String generateFileName(Byte tag,String bigBucketFileName){ + String fileName = new String("-"+tag+"-"+bigBucketFileName+suffix); + return fileName; + } + + public static String generateTmpURI(String baseURI,String id){ + String tmpFileURI = new String(baseURI+Path.SEPARATOR+"HashTable-"+id); + return tmpFileURI; + } +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/util/PathUtil.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java (revision 0) @@ -0,0 +1,12 @@ +package org.apache.hadoop.hive.ql.util; + +import java.text.SimpleDateFormat; +import java.util.Calendar; + +public class TimeUtil { + public static String now(){ + Calendar cal = Calendar.getInstance(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); + return sdf.format(cal.getTime()); + } +} Property changes on: ql/src/java/org/apache/hadoop/hive/ql/util/TimeUtil.java ___________________________________________________________________ Added: svn:executable + * Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -109,7 +109,7 @@ predicate: expr: (ds = '2008-04-08') type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} {ds} @@ -122,10 +122,10 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket21.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Stage: Stage-1 Map Reduce @@ -174,9 +174,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -186,12 +186,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205432 + transient_lastDdlTime 1288650014 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -201,9 +201,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -215,12 +215,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205430 + transient_lastDdlTime 1288650006 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -232,12 +232,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205430 + transient_lastDdlTime 1288650006 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -249,14 +249,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -266,28 +266,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205432 + transient_lastDdlTime 1288650014 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - 
pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -298,12 +298,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205432 + transient_lastDdlTime 1288650014 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -311,9 +311,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-50-32_841_1924740087439249464/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-20-14_474_8339072243379376566/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -324,12 +324,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205432 + transient_lastDdlTime 1288650014 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -340,12 +340,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name 
bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205432 + transient_lastDdlTime 1288650014 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -373,11 +373,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-50-43_701_3507299544570748176/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-20-29_565_6644333986010981360/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-50-43_701_3507299544570748176/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-20-29_565_6644333986010981360/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -426,11 +426,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-00_110_7718020573393376721/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-20-53_907_3894360990815148820/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-00_110_7718020573393376721/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-20-53_907_3894360990815148820/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -469,14 +469,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-06_973_1458771141822302428/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-06_048_6153422867951057602/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-06_973_1458771141822302428/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-06_048_6153422867951057602/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION 
[(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -538,7 +538,7 @@ TableScan alias: a GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} {ds} @@ -551,12 +551,12 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Stage: Stage-1 Map Reduce @@ -610,9 +610,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -622,7 +622,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -631,7 +631,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205460 + transient_lastDdlTime 1288650053 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -641,9 +641,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -657,13 +657,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205430 + transient_lastDdlTime 1288650006 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -675,13 +675,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl 
struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205430 + transient_lastDdlTime 1288650006 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -693,14 +693,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -710,7 +710,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -719,23 +719,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205460 + transient_lastDdlTime 1288650053 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -746,7 +746,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -755,7 +755,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205460 + transient_lastDdlTime 1288650053 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -763,9 +763,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-10_359_364505822013813544/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-21-11_707_393550245150051948/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -776,7 +776,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -785,7 +785,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205460 + transient_lastDdlTime 1288650053 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -796,7 +796,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -805,7 +805,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205460 + transient_lastDdlTime 1288650053 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -845,11 +845,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-19_376_8798110857325074167/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-29_938_1821837043001591804/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: 
Output: file:/tmp/njain/hive_2010-10-27_11-51-19_376_8798110857325074167/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-29_938_1821837043001591804/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -934,11 +934,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-33_203_5193422920297774186/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-55_520_6434670321334373451/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-33_203_5193422920297774186/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-21-55_520_6434670321334373451/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1001,14 +1001,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-40_262_7723348030525111689/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-22-06_499_9090555875123889567/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-40_262_7723348030525111689/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-22-06_499_9090555875123889567/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -114,7 +114,7 @@ predicate: expr: (ds = '2008-04-08') type: 
boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -127,10 +127,10 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Stage: Stage-1 Map Reduce @@ -172,9 +172,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -184,12 +184,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205507 + transient_lastDdlTime 1288650143 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -199,9 +199,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - 
pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -213,12 +213,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205505 + transient_lastDdlTime 1288650136 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -230,12 +230,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205505 + transient_lastDdlTime 1288650136 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -247,14 +247,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -264,28 +264,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205507 + transient_lastDdlTime 1288650143 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp 
directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -296,12 +296,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205507 + transient_lastDdlTime 1288650143 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -309,9 +309,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-51-47_621_5486744443118933537/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-22-23_903_5862472836628016653/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -322,12 +322,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205507 + transient_lastDdlTime 1288650143 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -338,12 +338,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205507 + transient_lastDdlTime 1288650143 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -371,11 +371,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-55_223_7595198046216280886/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-22-38_853_8940633369092466541/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-51-55_223_7595198046216280886/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-22-38_853_8940633369092466541/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part_2)b.FieldSchema(name:value, type:string, comment:null), ] @@ -424,11 +424,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-09_771_3714042013527827222/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-03_789_2945651119167740439/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-09_771_3714042013527827222/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-03_789_2945651119167740439/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -467,14 +467,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-16_954_6426760666949752724/-mr-10000 
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-14_909_7167626911022416977/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-16_954_6426760666949752724/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-14_909_7167626911022416977/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -536,7 +536,7 @@ TableScan alias: a GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -549,10 +549,10 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Stage: Stage-1 Map Reduce @@ -604,9 +604,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10000/ + Stats Publishing Key Prefix: 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -616,7 +616,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -625,7 +625,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 - transient_lastDdlTime 1288205529 + transient_lastDdlTime 1288650183 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -635,9 +635,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -651,13 +651,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205506 + transient_lastDdlTime 1288650141 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -669,13 +669,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205506 + transient_lastDdlTime 1288650141 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -687,14 +687,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10000 + 
source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -704,7 +704,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -713,23 +713,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 - transient_lastDdlTime 1288205529 + transient_lastDdlTime 1288650183 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -740,7 +740,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -749,7 +749,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 - transient_lastDdlTime 1288205529 + transient_lastDdlTime 1288650183 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -757,9 +757,9 @@ MultiFileSpray: false Needs Tagging: 
false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-19_732_1742896222185590327/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-23-20_535_1312598400950950732/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -770,7 +770,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -779,7 +779,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 - transient_lastDdlTime 1288205529 + transient_lastDdlTime 1288650183 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -790,7 +790,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -799,7 +799,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 0 - transient_lastDdlTime 1288205529 + transient_lastDdlTime 1288650183 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -839,11 +839,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-26_971_1828709485189696923/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-34_528_3070656571358913387/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-26_971_1828709485189696923/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-34_528_3070656571358913387/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, 
comment:null), ] @@ -928,11 +928,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-40_814_4732884916997134054/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-58_364_6926535512479448236/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-40_814_4732884916997134054/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-23-58_364_6926535512479448236/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -995,14 +995,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-48_221_6524686633397717886/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-24-09_411_1344187366571392469/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-52-48_221_6524686633397717886/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-24-09_411_1344187366571392469/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -114,7 +114,7 @@ predicate: expr: (ds = '2008-04-08') type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -127,10 +127,10 @@ Alias Bucket Base File Name Mapping: b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 Stage: Stage-1 Map Reduce @@ -182,9 +182,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -194,12 +194,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212927 + transient_lastDdlTime 1288650267 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -209,9 +209,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [a] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -225,13 +225,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212926 + transient_lastDdlTime 1288650264 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -243,13 +243,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212926 + transient_lastDdlTime 1288650264 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -261,14 +261,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -278,28 +278,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212927 + transient_lastDdlTime 1288650267 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -310,12 +310,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212927 + transient_lastDdlTime 1288650267 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -323,9 +323,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-55-27_657_736403967432283787/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-24-27_183_2390650598810630397/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -336,12 +336,12 @@ columns.types 
string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212927 + transient_lastDdlTime 1288650267 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -352,12 +352,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212927 + transient_lastDdlTime 1288650267 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -385,11 +385,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-36_058_2513418815469973596/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-24-44_305_1104031879555892657/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-36_058_2513418815469973596/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-24-44_305_1104031879555892657/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin_part_2)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -438,11 +438,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-50_209_4160091089758225520/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-09_256_6305814239350306024/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-50_209_4160091089758225520/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-09_256_6305814239350306024/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: 
bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -481,14 +481,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-56_715_6048616112063548594/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-20_426_3287188176109029760/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-55-56_715_6048616112063548594/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-20_426_3287188176109029760/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -560,7 +560,7 @@ predicate: expr: (ds = '2008-04-08') type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -573,12 +573,12 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} + a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 Stage: Stage-1 Map Reduce @@ -630,9 +630,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -642,7 +642,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -651,7 +651,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 - transient_lastDdlTime 1288212950 + transient_lastDdlTime 1288650309 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -661,9 +661,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -677,13 +677,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212924 + transient_lastDdlTime 1288650259 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -695,13 +695,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212924 + transient_lastDdlTime 1288650259 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -713,14 +713,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -730,7 +730,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -739,23 +739,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 - transient_lastDdlTime 1288212950 + transient_lastDdlTime 1288650309 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -766,7 +766,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -775,7 +775,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 - transient_lastDdlTime 1288212950 + transient_lastDdlTime 1288650309 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -783,9 +783,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-56-00_310_8311365964032191828/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-25-26_107_3717485540950596060/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -796,7 +796,7 @@ columns.types string:string:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -805,7 +805,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 - transient_lastDdlTime 1288212950 + transient_lastDdlTime 1288650309 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -816,7 +816,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -825,7 +825,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 11067 - transient_lastDdlTime 1288212950 + transient_lastDdlTime 1288650309 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -865,11 +865,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-09_791_8706691458656943408/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-43_617_5194148307783795417/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-09_791_8706691458656943408/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-25-43_617_5194148307783795417/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -954,11 +954,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-23_619_4418056010014368503/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-09_245_3915270002121305646/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-23_619_4418056010014368503/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-09_245_3915270002121305646/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: 
bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1021,14 +1021,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-30_141_4118421935613596878/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-20_273_5656359965078469619/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-56-30_141_4118421935613596878/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-20_273_5656359965078469619/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -104,7 +104,7 @@ TableScan alias: b GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -117,10 +117,10 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + b {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Stage: Stage-1 Map Reduce @@ -162,9 +162,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -174,12 +174,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205575 + transient_lastDdlTime 1288650397 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -189,9 +189,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -203,12 +203,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205572 + transient_lastDdlTime 1288650389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -220,12 +220,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205572 + transient_lastDdlTime 1288650389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -237,14 +237,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -254,28 +254,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205575 + transient_lastDdlTime 1288650397 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -286,12 +286,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205575 + transient_lastDdlTime 1288650397 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -299,9 +299,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-52-55_191_6284354312574445049/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-26-37_698_7271549744271200051/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -312,12 +312,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205575 + transient_lastDdlTime 1288650397 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -328,12 +328,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205575 + transient_lastDdlTime 1288650397 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -359,11 +359,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-02_738_4577457943531711045/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-51_687_7933516826509845634/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: 
default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-02_738_4577457943531711045/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-26-51_687_7933516826509845634/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin)b.FieldSchema(name:value, type:string, comment:null), ] @@ -410,11 +410,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-16_429_5837890045100564683/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-14_043_398409329103016745/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-16_429_5837890045100564683/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-14_043_398409329103016745/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -453,14 +453,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-24_119_5713666769406989600/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-25_210_7398332889155314673/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-24_119_5713666769406989600/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-25_210_7398332889155314673/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -522,7 +522,7 @@ TableScan alias: a GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -535,10 +535,10 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a 
{pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 Stage: Stage-1 Map Reduce @@ -580,9 +580,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -592,7 +592,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -601,7 +601,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205596 + transient_lastDdlTime 1288650433 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -611,9 +611,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [b] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -625,12 +625,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205572 + transient_lastDdlTime 1288650389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -642,12 +642,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205572 + transient_lastDdlTime 1288650389 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -659,14 +659,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -676,7 +676,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -685,23 +685,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205596 + transient_lastDdlTime 1288650433 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: 
pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -712,7 +712,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -721,7 +721,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205596 + transient_lastDdlTime 1288650433 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -729,9 +729,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-27_274_740197042204524306/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-27-31_011_6717768929903756977/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -742,7 +742,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -751,7 +751,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205596 + transient_lastDdlTime 1288650433 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -762,7 +762,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -771,7 +771,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 8983 - transient_lastDdlTime 1288205596 + transient_lastDdlTime 1288650433 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -809,11 +809,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-34_796_7433622090937468625/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-46_602_7484956047403270691/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-34_796_7433622090937468625/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-27-46_602_7484956047403270691/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -896,11 +896,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-48_705_2477192820764361821/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-28-10_006_886523760409495372/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-48_705_2477192820764361821/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-28-10_006_886523760409495372/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -963,14 +963,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-55_192_5237681680689076654/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-28-21_048_5975898411069883691/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: 
default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-53-55_192_5237681680689076654/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-28-21_048_5975898411069883691/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -134,7 +134,7 @@ TableScan alias: a GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -147,16 +147,16 @@ Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a 
{pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0 + 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3 Stage: Stage-1 Map Reduce @@ -198,9 +198,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -210,12 +210,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213068 + transient_lastDdlTime 1288650527 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -225,10 +225,10 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -242,13 +242,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213061 + transient_lastDdlTime 1288650510 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -260,17 +260,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213061 + transient_lastDdlTime 1288650510 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -284,13 +284,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213061 + transient_lastDdlTime 1288650510 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -302,13 +302,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part name srcbucket_mapjoin_part partition_columns ds serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213061 + transient_lastDdlTime 1288650510 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part name: srcbucket_mapjoin_part @@ -320,14 +320,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -337,28 +337,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213068 + transient_lastDdlTime 1288650527 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -369,12 +369,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213068 + transient_lastDdlTime 1288650527 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -382,9 +382,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002 
[pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-57-48_658_4685912752018510968/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-28-47_892_3099520402660203602/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -395,12 +395,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213068 + transient_lastDdlTime 1288650527 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -411,12 +411,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213068 + transient_lastDdlTime 1288650527 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -446,11 +446,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-00_476_6141696786840834340/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-14_673_7845787468718039068/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-00_476_6141696786840834340/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-14_673_7845787468718039068/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ] @@ -501,11 +501,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: 
default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-16_764_7387427002932242988/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-45_469_2898609081763285179/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-16_764_7387427002932242988/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-45_469_2898609081763285179/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -544,14 +544,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-24_273_8996797782903684116/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-58_438_4994983906126618484/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-24_273_8996797782903684116/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-29-58_438_4994983906126618484/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -613,7 +613,7 @@ TableScan alias: a GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -626,12 +626,12 @@ Alias Bucket Base File Name Mapping: a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]} Alias Bucket File Name Mapping: - a {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} + a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1 Stage: Stage-1 Map Reduce @@ -673,9 +673,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -685,7 +685,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -694,7 +694,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 - transient_lastDdlTime 1288213096 + transient_lastDdlTime 1288650585 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -704,10 +704,10 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 Partition base file name: ds=2008-04-08 input format: org.apache.hadoop.mapred.TextInputFormat @@ -721,13 +721,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213066 + transient_lastDdlTime 1288650522 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -739,17 +739,17 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213066 + transient_lastDdlTime 1288650522 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 Partition base file name: ds=2008-04-09 input format: org.apache.hadoop.mapred.TextInputFormat @@ -763,13 +763,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213066 + transient_lastDdlTime 1288650522 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -781,13 +781,13 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part_2 name srcbucket_mapjoin_part_2 partition_columns ds serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213066 + transient_lastDdlTime 1288650522 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin_part_2 name: srcbucket_mapjoin_part_2 @@ -799,14 +799,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -816,7 +816,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -825,23 +825,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 - transient_lastDdlTime 1288213096 + transient_lastDdlTime 1288650585 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -852,7 +852,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -861,7 +861,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 - transient_lastDdlTime 1288213096 + transient_lastDdlTime 1288650585 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -869,9 +869,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-58-27_067_2152368000902411635/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-30-03_882_2938586219616660137/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -882,7 +882,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -891,7 +891,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 - transient_lastDdlTime 1288213096 + transient_lastDdlTime 1288650585 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -902,7 +902,7 @@ columns.types string:string:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result numFiles 1 numPartitions 0 @@ -911,7 +911,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 17966 - transient_lastDdlTime 1288213096 + transient_lastDdlTime 1288650585 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result @@ -953,11 +953,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-35_836_7705379100464575308/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-23_397_5260968403924198010/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-35_836_7705379100464575308/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-23_397_5260968403924198010/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ] @@ -1044,11 +1044,11 @@ PREHOOK: query: select count(1) from bucketmapjoin_tmp_result PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_tmp_result -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-49_907_5039198453352841568/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-49_074_4217586447894049377/-mr-10000 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_tmp_result -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-49_907_5039198453352841568/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-49_074_4217586447894049377/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] @@ -1111,14 +1111,14 @@ PREHOOK: type: QUERY PREHOOK: Input: default@bucketmapjoin_hash_result_1 PREHOOK: Input: default@bucketmapjoin_hash_result_2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-56_380_2556535610013066769/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-59_082_3779811683523766084/-mr-10000 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2 from bucketmapjoin_hash_result_1 a left outer 
join bucketmapjoin_hash_result_2 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucketmapjoin_hash_result_1 POSTHOOK: Input: default@bucketmapjoin_hash_result_2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_13-58-56_380_2556535610013066769/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_15-30-59_082_3779811683523766084/-mr-10000 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -79,7 +79,7 @@ predicate: expr: (ds = '2008-04-08') type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} {ds} @@ -136,9 +136,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -148,12 +148,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213143 + transient_lastDdlTime 1288650674 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -163,9 +163,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: org.apache.hadoop.mapred.TextInputFormat @@ -177,12 +177,12 @@ 
columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213141 + transient_lastDdlTime 1288650670 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -194,12 +194,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213141 + transient_lastDdlTime 1288650670 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -211,14 +211,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -228,28 +228,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213143 + transient_lastDdlTime 1288650674 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10001 + tmp directory: 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -260,12 +260,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213143 + transient_lastDdlTime 1288650674 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -273,9 +273,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-03_845_4227055704170815222/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-14_411_1346615739045454741/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -286,12 +286,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213143 
+ transient_lastDdlTime 1288650674 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -302,12 +302,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213143 + transient_lastDdlTime 1288650674 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy) @@ -79,7 +79,7 @@ TableScan alias: b GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -129,9 +129,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -141,12 +141,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213147 + transient_lastDdlTime 1288650683 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -156,9 +156,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin [a] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin Partition base file name: srcbucket_mapjoin input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -170,12 +170,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213145 + transient_lastDdlTime 1288650678 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -187,12 +187,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin name srcbucket_mapjoin serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213145 + transient_lastDdlTime 1288650678 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcbucket_mapjoin name: srcbucket_mapjoin @@ -204,14 +204,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -221,28 +221,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213147 + transient_lastDdlTime 1288650683 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10001 + tmp directory: 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -253,12 +253,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213147 + transient_lastDdlTime 1288650683 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result TotalFiles: 1 @@ -266,9 +266,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_13-59-07_847_8947357711223644876/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_15-31-23_510_9117996009920475416/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -279,12 +279,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213147 
+ transient_lastDdlTime 1288650683 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -295,12 +295,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result name bucketmapjoin_tmp_result serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288213147 + transient_lastDdlTime 1288650683 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: bucketmapjoin_tmp_result name: bucketmapjoin_tmp_result
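Every golden file updated in this batch shows the same operator rename: plans that used to print "JDBM Sink Operator" now print "Hash Table Sink Operator", reflecting the move from a JDBM-backed store to an in-memory hash table for the small side of a map join. As a reader's aid, the toy sketch below illustrates the idea behind that sink: the build table is materialized into a hash map keyed on the join key, and the probe table then streams against it with no shuffle. The class and method names are invented for this illustration and are not Hive's operator API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MapJoinSketch {
  // Build phase: what a hash table sink conceptually produces. row[0] is
  // taken to be the join key; all rows are kept per key to handle duplicates.
  static Map<String, List<String[]>> buildHashTable(List<String[]> smallTable) {
    Map<String, List<String[]>> table = new HashMap<>();
    for (String[] row : smallTable) {
      table.computeIfAbsent(row[0], k -> new ArrayList<>()).add(row);
    }
    return table;
  }

  // Probe phase: the big table streams past the hash table row by row,
  // which is why the join itself needs no reduce side in these plans.
  static List<String> probe(Map<String, List<String[]>> table, List<String[]> bigTable) {
    List<String> joined = new ArrayList<>();
    for (String[] row : bigTable) {
      List<String[]> matches = table.get(row[0]);
      if (matches == null) {
        continue; // inner join: unmatched probe rows are dropped
      }
      for (String[] match : matches) {
        joined.add(row[0] + "\t" + row[1] + "\t" + match[1]);
      }
    }
    return joined;
  }
}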
Index: ql/src/test/results/clientpositive/join25.q.out =================================================================== --- ql/src/test/results/clientpositive/join25.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join25.q.out (working copy) @@ -36,7 +36,7 @@ x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -109,7 +109,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-58_967_6806228839153646653/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-37_025_3765852423554717085/-ext-10000 Stage: Stage-0 Move Operator @@ -127,7 +127,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-53-58_967_6806228839153646653/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-37_025_3765852423554717085/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -158,11 +158,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-04_814_7186456825570807641/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-16-45_737_8197095690995905449/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-04_814_7186456825570807641/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-16-45_737_8197095690995905449/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ]
Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -42,7 +42,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {value} @@ -57,7 +57,7 @@ TableScan alias: y GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions:
0 {key} 1 {value} @@ -127,9 +127,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -139,12 +139,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205647 + transient_lastDdlTime 1288653410 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -154,9 +154,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -170,13 +170,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205423 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -187,13 +187,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205423 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -205,14 +205,14 @@ Move
Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -222,28 +222,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205647 + transient_lastDdlTime 1288653410 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -254,12 +254,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
transient_lastDdlTime 1288205647 + transient_lastDdlTime 1288653410 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -267,9 +267,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-07_868_2505058392721838239/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-16-50_324_2469466979807177713/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -280,12 +280,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205647 + transient_lastDdlTime 1288653410 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -296,12 +296,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205647 + transient_lastDdlTime 1288653410 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -331,11 +331,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-13_992_6168800501053867920/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-16-59_918_3897964192683234759/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-13_992_6168800501053867920/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-16-59_918_3897964192683234759/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join27.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join27.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join27.q.out (working copy) @@ -36,7 +36,7 @@ x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -109,7 +109,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-17_054_2541334443319399043/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-04_595_6142296159841366888/-ext-10000 Stage: Stage-0 Move Operator @@ -127,7 +127,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-17_054_2541334443319399043/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-04_595_6142296159841366888/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -158,11 +158,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key, x.value PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-22_385_7137802075538214766/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-13_178_216044341882610297/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key, x.value POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-22_385_7137802075538214766/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-13_178_216044341882610297/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join28.q.out =================================================================== --- ql/src/test/results/clientpositive/join28.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join28.q.out (working copy) @@ -45,7 +45,7 @@ subq:x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -69,7 +69,7 @@ predicate: expr: (hr = 11) type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col0} 1 {value} @@ -151,7 +151,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-25_731_6161190954879287426/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-17_755_4991443405858643706/-ext-10000 Stage: Stage-0 Move Operator @@ -169,7 +169,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-25_731_6161190954879287426/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-17_755_4991443405858643706/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -207,11 +207,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-31_701_2363624034797918899/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-27_626_8694565253775534592/-mr-10000 POSTHOOK: query: 
select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-31_701_2363624034797918899/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-27_626_8694565253775534592/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] 128 val_128 Index: ql/src/test/results/clientpositive/join29.q.out =================================================================== --- ql/src/test/results/clientpositive/join29.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join29.q.out (working copy) @@ -89,12 +89,12 @@ Stage: Stage-9 Map Reduce Local Work Alias -> Map Local Tables: - file:/tmp/njain/hive_2010-10-27_11-54-34_757_9120981059274795938/-mr-10004 + file:/tmp/liyintang/hive_2010-11-01_16-17-32_167_2815764218524149876/-mr-10004 Fetch Operator limit: -1 Alias -> Map Local Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-54-34_757_9120981059274795938/-mr-10004 - JDBM Sink Operator + file:/tmp/liyintang/hive_2010-11-01_16-17-32_167_2815764218524149876/-mr-10004 + Hash Table Sink Operator condition expressions: 0 {_col0} {_col1} 1 {_col1} @@ -107,7 +107,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-54-34_757_9120981059274795938/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-17-32_167_2815764218524149876/-mr-10002 Map Join Operator condition map: Inner Join 0 to 1 @@ -165,7 +165,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-34_757_9120981059274795938/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-32_167_2815764218524149876/-ext-10000 Stage: Stage-0 Move Operator @@ -183,7 +183,7 @@ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-54-34_757_9120981059274795938/-ext-10003 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-17-32_167_2815764218524149876/-ext-10003 File Output Operator compressed: false GlobalTableId: 0 @@ -272,11 +272,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-46_357_4458958652114796552/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-47_539_3589278589695889631/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-46_357_4458958652114796552/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-17-47_539_3589278589695889631/-mr-10000 POSTHOOK: Lineage: dest_j1.cnt1 EXPRESSION [(src1)x.null, ] POSTHOOK: Lineage: dest_j1.cnt2 EXPRESSION [(src)y.null, ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join30.q.out =================================================================== --- ql/src/test/results/clientpositive/join30.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join30.q.out (working copy) @@ -32,7 +32,7 @@ x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition 
expressions: 0 {key} 1 @@ -72,7 +72,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-54-49_452_8944509394940573381/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-18-00_883_1287112401261798531/-mr-10002 Select Operator expressions: expr: _col0 @@ -168,11 +168,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-56_890_5863714555389240048/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-12_574_4265273674235722754/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-54-56_890_5863714555389240048/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-12_574_4265273674235722754/-mr-10000 POSTHOOK: Lineage: dest_j1.cnt EXPRESSION [(src1)x.null, (src)y.null, ] POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] 66 1
Index: ql/src/test/results/clientpositive/join31.q.out =================================================================== --- ql/src/test/results/clientpositive/join31.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join31.q.out (working copy) @@ -87,12 +87,12 @@ Stage: Stage-7 Map Reduce Local Work Alias -> Map Local Tables: - file:/tmp/njain/hive_2010-10-27_11-55-00_152_4091108333306141197/-mr-10004 + file:/tmp/liyintang/hive_2010-11-01_16-18-17_137_5903602741211650853/-mr-10004 Fetch Operator limit: -1 Alias -> Map Local Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-00_152_4091108333306141197/-mr-10004 - JDBM Sink Operator + file:/tmp/liyintang/hive_2010-11-01_16-18-17_137_5903602741211650853/-mr-10004 + Hash Table Sink Operator condition expressions: 0 {_col0} 1 @@ -105,7 +105,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-00_152_4091108333306141197/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-18-17_137_5903602741211650853/-mr-10002 Map Join Operator condition map: Inner Join 0 to 1 @@ -130,7 +130,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-00_152_4091108333306141197/-mr-10003 + file:/tmp/liyintang/hive_2010-11-01_16-18-17_137_5903602741211650853/-mr-10003 Select Operator expressions: expr: _col0 @@ -286,11 +286,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-14_996_8245169574800468911/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-36_861_7446237204560549185/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-14_996_8245169574800468911/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-36_861_7446237204560549185/-mr-10000 POSTHOOK: Lineage: dest_j1.cnt EXPRESSION [(src1)x.null, (src)y.null, ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] 128 1
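The join29 and join31 plans above show where that hash table gets built: a "Map Reduce Local Work" stage in which a local Fetch Operator reads the small side (the -mr-10004 scratch alias) into the Hash Table Sink Operator before the join job itself is launched. One generic point worth illustrating here: when a build side of roughly known size is loaded into a java.util.HashMap, presizing the map avoids repeated rehashing during the load. The row count and load factor below are made-up example values for illustration, not settings taken from this patch.

import java.util.HashMap;
import java.util.Map;

public class HashTableSizing {
  public static void main(String[] args) {
    int expectedRows = 1_000_000; // assumed build-side row count, illustrative
    float loadFactor = 0.75f;     // java.util.HashMap's default load factor

    // HashMap resizes once size exceeds capacity * loadFactor, so choose an
    // initial capacity large enough that the whole build side fits without
    // a single rehash during loading.
    int initialCapacity = (int) Math.ceil(expectedRows / loadFactor);
    Map<String, String> hashTable = new HashMap<>(initialCapacity, loadFactor);

    for (int i = 0; i < expectedRows; i++) {
      hashTable.put("key" + i, "value" + i);
    }
    System.out.println("loaded " + hashTable.size() + " rows without rehashing");
  }
}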
Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -41,7 +41,7 @@ TableScan
alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -73,7 +73,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/njain/hive_2010-10-27_11-55-20_756_5340504633113093386/-mr-10003 + directory: file:/tmp/liyintang/hive_2010-11-01_16-18-41_406_742451285643224738/-mr-10003 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -89,9 +89,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src [y] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src [y] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -102,12 +102,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -118,12 +118,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -154,7 +154,7 @@ predicate: expr: (hr = 11) type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col5} {_col0} 1 {value} @@ -167,7 +167,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-20_756_5340504633113093386/-mr-10003 + file:/tmp/liyintang/hive_2010-11-01_16-18-41_406_742451285643224738/-mr-10003 Select Operator expressions: expr: _col0 @@ -210,9 +210,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -222,12 +222,12 @@
columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205720 + transient_lastDdlTime 1288653521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -237,9 +237,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - file:/tmp/njain/hive_2010-10-27_11-55-20_756_5340504633113093386/-mr-10003 [file:/tmp/njain/hive_2010-10-27_11-55-20_756_5340504633113093386/-mr-10003] + file:/tmp/liyintang/hive_2010-11-01_16-18-41_406_742451285643224738/-mr-10003 [file:/tmp/liyintang/hive_2010-11-01_16-18-41_406_742451285643224738/-mr-10003] Path -> Partition: - file:/tmp/njain/hive_2010-10-27_11-55-20_756_5340504633113093386/-mr-10003 + file:/tmp/liyintang/hive_2010-11-01_16-18-41_406_742451285643224738/-mr-10003 Partition base file name: -mr-10003 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -263,14 +263,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -280,28 +280,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205720 + transient_lastDdlTime 1288653521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10000/ + Stats Aggregation Key Prefix: 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -312,12 +312,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205720 + transient_lastDdlTime 1288653521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -325,9 +325,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-20_756_5340504633113093386/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-41_406_742451285643224738/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -338,12 +338,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205720 + transient_lastDdlTime 1288653521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -354,12 +354,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 
{ string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205720 + transient_lastDdlTime 1288653521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -389,11 +389,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-28_452_2756269730845423706/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-54_629_7624609061921813274/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-28_452_2756269730845423706/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-18-54_629_7624609061921813274/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -37,7 +37,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -69,7 +69,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/njain/hive_2010-10-27_11-55-32_021_2958721579896900232/-mr-10002 + directory: file:/tmp/liyintang/hive_2010-11-01_16-18-59_176_7464825643609999321/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -85,9 +85,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src [y] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src [y] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -98,12 +98,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -114,12 +114,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -127,7 +127,7 @@ Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-32_021_2958721579896900232/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-18-59_176_7464825643609999321/-mr-10002 Select Operator expressions: expr: _col0 @@ -184,10 +184,10 @@ type: string Needs Tagging: true Path -> Alias: - file:/tmp/njain/hive_2010-10-27_11-55-32_021_2958721579896900232/-mr-10002 [file:/tmp/njain/hive_2010-10-27_11-55-32_021_2958721579896900232/-mr-10002] - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + file:/tmp/liyintang/hive_2010-11-01_16-18-59_176_7464825643609999321/-mr-10002 [file:/tmp/liyintang/hive_2010-11-01_16-18-59_176_7464825643609999321/-mr-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - file:/tmp/njain/hive_2010-10-27_11-55-32_021_2958721579896900232/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-18-59_176_7464825643609999321/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -203,7 +203,7 @@ columns _col0,_col1,_col5 columns.types string,string,string escape.delim \ - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -217,13 +217,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205423 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -234,13 +234,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205423 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -265,9 +265,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-32_021_2958721579896900232/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-59_176_7464825643609999321/-ext-10000 
NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-32_021_2958721579896900232/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-59_176_7464825643609999321/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -277,12 +277,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205731 + transient_lastDdlTime 1288653539 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -293,7 +293,7 @@ Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-32_021_2958721579896900232/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-59_176_7464825643609999321/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -303,19 +303,19 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205731 + transient_lastDdlTime 1288653539 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-32_021_2958721579896900232/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-59_176_7464825643609999321/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-32_021_2958721579896900232/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-18-59_176_7464825643609999321/-ext-10000/ PREHOOK: query: INSERT OVERWRITE TABLE dest_j1 @@ -342,11 +342,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-40_441_3273672303306599817/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-19-12_873_2128162473933840911/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-40_441_3273672303306599817/-mr-10000 +POSTHOOK: Output: 
file:/tmp/liyintang/hive_2010-11-01_16-19-12_873_2128162473933840911/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ]
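The join32/join33 plans above also document the publish protocol around the join: the job writes to a scratch directory (-ext-10002), a Move Operator promotes it to -ext-10000, and a final Move Operator with replace: true swaps it into the table's warehouse location. A minimal local-filesystem sketch of that stage-then-rename pattern follows; the paths are invented for the example, and Hive performs the real moves on the warehouse filesystem rather than java.nio.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class StagedPublish {
  public static void main(String[] args) throws IOException {
    // Stage: the "job" writes its output under a scratch directory first,
    // mirroring the -ext-10002 directories in the plans above.
    Path scratch = Files.createTempDirectory("ext-10002-");
    Files.writeString(scratch.resolve("000000_0"), "128\tval_128\tval_128\n");

    // Publish: promote the finished directory to the final location in one
    // rename, so readers never observe partially written output. ATOMIC_MOVE
    // requires source and destination to live on the same filesystem.
    Path warehouse = Files.createTempDirectory("warehouse-");
    Path destination = warehouse.resolve("dest_j1");
    Files.move(scratch, destination, StandardCopyOption.ATOMIC_MOVE);
    System.out.println("published " + destination);
  }
}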
Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -47,7 +47,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col1} 1 {key} {value} @@ -115,9 +115,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -127,12 +127,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -193,9 +193,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -205,12 +205,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string
val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -220,9 +220,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x, null-subquery2:subq1-subquery2:x1] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x, null-subquery2:subq1-subquery2:x1] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -233,12 +233,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -249,12 +249,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -266,14 +266,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -283,28 +283,28 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -315,12 +315,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -328,9 +328,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-43_964_5226296681156041746/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-17_435_5047227894689177140/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -341,12 +341,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -357,12 +357,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205743 + transient_lastDdlTime 1288653557 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -398,11 +398,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-49_613_5191052733380652366/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-19-27_095_7252017921731635236/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-55-49_613_5191052733380652366/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-19-27_095_7252017921731635236/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)x.FieldSchema(name:value, type:string, comment:default), (src)x1.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -83,9 +83,9 @@ type: bigint Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -96,12 +96,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 
1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -112,12 +112,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -141,7 +141,7 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10002 + directory: file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -165,7 +165,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col1} 1 {key} {value} @@ -178,7 +178,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10002 Union Map Join Operator condition map: @@ -222,9 +222,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -234,18 +234,18 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 GatherStats: true MultiFileSpray: false - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10004 + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10004 Union Map Join Operator condition map: @@ -289,9 +289,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 + directory: 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -301,12 +301,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -316,10 +316,10 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10002 [file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10002] - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10004 [file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10004] + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10002 [file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10002] + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10004 [file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10004] Path -> Partition: - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -335,7 +335,7 @@ columns _col0,_col1 columns.types string,bigint escape.delim \ - file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10004 + file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10004 Partition base file name: -mr-10004 input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -359,14 +359,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat 
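The recurring plan change in these golden files — "JDBM Sink Operator" becoming "Hash Table Sink Operator" — is the visible effect of this patch: the map-join build side now materializes the small table as an in-memory hash table that the Map Join Operator later probes while streaming the big table. A minimal sketch of that build/probe split, assuming hypothetical class and method names (this is not Hive's actual operator API):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative only: hypothetical names, not Hive's HashTableSinkOperator API.
public class HashTableSinkSketch {

  // Build side ("Hash Table Sink"): load the small table into a HashMap
  // keyed by the join key. Pre-sizing with an initial capacity and load
  // factor avoids rehashing while the table loads.
  static Map<String, List<String[]>> build(List<String[]> smallRows,
                                           int keyIdx,
                                           int initCapacity,
                                           float loadFactor) {
    Map<String, List<String[]>> table = new HashMap<>(initCapacity, loadFactor);
    for (String[] row : smallRows) {
      table.computeIfAbsent(row[keyIdx], k -> new ArrayList<>()).add(row);
    }
    return table;
  }

  // Probe side ("Map Join Operator"): stream the big table once and join
  // each row against the in-memory table; unmatched rows drop (inner join).
  static void probe(Map<String, List<String[]>> table,
                    List<String[]> bigRows, int keyIdx) {
    for (String[] big : bigRows) {
      List<String[]> matches = table.get(big[keyIdx]);
      if (matches == null) {
        continue;
      }
      for (String[] small : matches) {
        System.out.println(String.join("\t", big) + "\t" + String.join("\t", small));
      }
    }
  }
}
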
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -376,28 +376,28 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10001 Stage: Stage-3 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000/ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -408,12 +408,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -421,9 +421,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 [pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-55-54_179_6477900064401201039/-ext-10003 + 
pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-19-31_744_8355884033924717082/-ext-10003 Partition base file name: -ext-10003 input format: org.apache.hadoop.mapred.TextInputFormat @@ -434,12 +434,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -450,12 +450,12 @@ columns.types string:string:int file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205754 + transient_lastDdlTime 1288653571 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -505,9 +505,9 @@ type: bigint Needs Tagging: false Path -> Alias: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] Path -> Partition: - pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src Partition base file name: src input format: org.apache.hadoop.mapred.TextInputFormat @@ -518,12 +518,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -534,12 +534,12 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/njain/hive-commit1/build/ql/test/data/warehouse/src + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/src name src serialization.ddl struct src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288205427 + transient_lastDdlTime 1288649687 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: src name: src @@ -563,7 +563,7 @@ File Output Operator 
compressed: false GlobalTableId: 0 - directory: file:/tmp/njain/hive_2010-10-27_11-55-54_179_6477900064401201039/-mr-10004 + directory: file:/tmp/liyintang/hive_2010-11-01_16-19-31_744_8355884033924717082/-mr-10004 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -607,11 +607,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-06_687_1264215794989002476/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-19-51_301_299922359106585049/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-06_687_1264215794989002476/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-19-51_301_299922359106585049/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 EXPRESSION [(src)x.null, (src)x1.null, ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join36.q.out =================================================================== --- ql/src/test/results/clientpositive/join36.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join36.q.out (working copy) @@ -76,7 +76,7 @@ x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {cnt} 1 {cnt} @@ -140,7 +140,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-56-20_127_6380703738706956749/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-10_828_5833005550627997813/-ext-10000 Stage: Stage-0 Move Operator @@ -158,7 +158,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-56-20_127_6380703738706956749/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-10_828_5833005550627997813/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -193,11 +193,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-26_754_2259038051216019843/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-19_365_3911030691232821209/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-26_754_2259038051216019843/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-19_365_3911030691232821209/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(tmp1)x.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(tmp2)y.FieldSchema(name:cnt, type:int, comment:null), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(tmp1)x.FieldSchema(name:cnt, type:int, comment:null), ] Index: ql/src/test/results/clientpositive/join37.q.out =================================================================== --- ql/src/test/results/clientpositive/join37.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join37.q.out (working copy) @@ -36,7 +36,7 @@ x TableScan alias: x - JDBM 
Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} @@ -109,7 +109,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-56-29_885_2306134457319021637/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-24_383_9005766768233481307/-ext-10000 Stage: Stage-0 Move Operator @@ -127,7 +127,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-56-29_885_2306134457319021637/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-24_383_9005766768233481307/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -158,11 +158,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-34_279_5727082010829048557/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-34_512_8638859605824703474/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-56-34_279_5727082010829048557/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-34_512_8638859605824703474/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join38.q.out =================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -26,11 +26,11 @@ PREHOOK: query: select * from tmp PREHOOK: type: QUERY PREHOOK: Input: default@tmp -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-04_133_1536500479311783598/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-45_466_5483258472739145763/-mr-10000 POSTHOOK: query: select * from tmp POSTHOOK: type: QUERY POSTHOOK: Input: default@tmp -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-04_133_1536500479311783598/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-45_466_5483258472739145763/-mr-10000 POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] @@ -89,7 +89,7 @@ a TableScan alias: a - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {value} 1 {col5} {col11} @@ -133,7 +133,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-58-04_322_8814213860188537568/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-20-45_911_2064531513611866633/-mr-10002 Select Operator expressions: expr: _col1 @@ -221,7 +221,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@tmp -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-04_499_6020968433330491483/-mr-10000 +PREHOOK: Output: 
file:/tmp/liyintang/hive_2010-11-01_16-20-46_049_9070662343235614975/-mr-10000 POSTHOOK: query: FROM src a JOIN tmp b ON (a.key = b.col11) SELECT /*+ MAPJOIN(a) */ a.value, b.col5, count(1) as count where b.col11 = 111 @@ -229,7 +229,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@tmp -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-04_499_6020968433330491483/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-20-46_049_9070662343235614975/-mr-10000 POSTHOOK: Lineage: tmp.col0 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tmp.col10 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join39.q.out =================================================================== --- ql/src/test/results/clientpositive/join39.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join39.q.out (working copy) @@ -51,7 +51,7 @@ expr: value type: string outputColumnNames: _col0, _col1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {_col0} {_col1} @@ -119,7 +119,7 @@ Move Operator files: hdfs directory: true - destination: pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-58-12_180_546438697212415855/-ext-10000 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-55_158_2692708189846951967/-ext-10000 Stage: Stage-0 Move Operator @@ -137,7 +137,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/njain/hive-commit1/build/ql/scratchdir/hive_2010-10-27_11-58-12_180_546438697212415855/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-20-55_158_2692708189846951967/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 @@ -167,11 +167,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-17_368_425948306842908675/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-04_716_7416095714366618316/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-17_368_425948306842908675/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-04_716_7416095714366618316/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/join40.q.out =================================================================== --- ql/src/test/results/clientpositive/join40.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join40.q.out (working copy) @@ -100,12 +100,12 @@ FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-27_869_8594307870019576114/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-18_392_5897082215680081754/-mr-10000 POSTHOOK: query: SELECT 
x.key, x.value, y.key, y.value FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-27_869_8594307870019576114/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-18_392_5897082215680081754/-mr-10000 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 @@ -751,12 +751,12 @@ FROM src src1 JOIN src src2 ON (src1.key = src2.key) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-32_224_2628641681553791073/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-26_677_4557382054282592069/-mr-10000 POSTHOOK: query: select src1.key, src2.value FROM src src1 JOIN src src2 ON (src1.key = src2.key) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-32_224_2628641681553791073/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-26_677_4557382054282592069/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1904,7 +1904,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-58-37_240_1047427345351102102/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-21-36_796_7929496036487323541/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1952,12 +1952,12 @@ SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-37_348_2776356054970751573/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-36_956_630001766348897683/-mr-10000 POSTHOOK: query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-37_348_2776356054970751573/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-36_956_630001766348897683/-mr-10000 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 100 val_100 NULL NULL NULL NULL 100 val_100 @@ -2633,7 +2633,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-58-44_576_3870847117030797031/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-21-50_636_5292594642408163540/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2681,12 +2681,12 @@ SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-44_694_461427396890734084/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-50_789_1259915712655198638/-mr-10000 POSTHOOK: query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-44_694_461427396890734084/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-21-50_789_1259915712655198638/-mr-10000 NULL NULL NULL NULL 10 val_10 NULL NULL NULL NULL 100 val_100 NULL NULL NULL NULL 100 val_100 @@ -3277,7 +3277,7 @@ expr: value type: string 
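The join40 plans and results around this hunk pair the MAPJOIN hint with outer joins, and the golden rows (for example "NULL NULL NULL NULL 10 val_10" above, or "238 val_238 NULL NULL" further on) show NULL padding for preserved-side rows without a match. A sketch of that probe behavior, under the same hypothetical names as the earlier snippet and likewise not Hive's actual code:

import java.util.Arrays;
import java.util.List;
import java.util.Map;

// Illustrative only: hypothetical names, not Hive's actual join code.
public class OuterMapJoinSketch {

  // Left outer probe: the streamed (preserved) side always emits; when the
  // hash table built from the other side has no match, the missing columns
  // are NULL-padded, matching golden rows like "238 val_238 NULL NULL".
  static void leftOuterProbe(Map<String, List<String[]>> smallTable,
                             List<String[]> bigRows, int keyIdx,
                             int smallWidth) {
    for (String[] big : bigRows) {
      List<String[]> matches = smallTable.get(big[keyIdx]);
      if (matches == null || matches.isEmpty()) {
        String[] nulls = new String[smallWidth];
        Arrays.fill(nulls, "NULL");
        emit(big, nulls);
      } else {
        for (String[] small : matches) {
          emit(big, small);
        }
      }
    }
  }

  private static void emit(String[] left, String[] right) {
    System.out.println(String.join("\t", left) + "\t" + String.join("\t", right));
  }
}
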
outputColumnNames: _col0, _col1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {_col0} {_col1} @@ -3345,12 +3345,12 @@ FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-51_016_2800726535361834663/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-22-01_836_4567376268493317063/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(y) */ x.key, x.value, y.key, y.value FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-51_016_2800726535361834663/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-22-01_836_4567376268493317063/-mr-10000 238 val_238 NULL NULL 86 val_86 86 val_86 311 val_311 NULL NULL @@ -3984,7 +3984,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/njain/hive_2010-10-27_11-58-54_107_7492093354536600513/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-22-07_677_1656799639212178119/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -4018,9 +4018,9 @@ PREHOOK: query: SELECT COUNT(1) FROM SRC A JOIN SRC B ON (A.KEY=B.KEY) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-54_187_249714887699251169/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-22-07_803_2739042981356261765/-mr-10000 POSTHOOK: query: SELECT COUNT(1) FROM SRC A JOIN SRC B ON (A.KEY=B.KEY) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/njain/hive_2010-10-27_11-58-54_187_249714887699251169/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-22-07_803_2739042981356261765/-mr-10000 1028 Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -44,7 +44,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {value} @@ -59,7 +59,7 @@ TableScan alias: y GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {value} @@ -128,9 +128,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -140,12 +140,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288215315 + transient_lastDdlTime 1288654460 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -155,9 +155,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -171,13 +171,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212591 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -188,13 +188,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212591 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -206,14 +206,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -223,28 +223,28 @@ columns.types 
string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288215315 + transient_lastDdlTime 1288654460 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -255,12 +255,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288215315 + transient_lastDdlTime 1288654460 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -268,9 +268,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-15_336_1336721351939648686/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-20_604_9007798647980257482/-ext-10002 Partition base file 
name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -281,12 +281,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288215315 + transient_lastDdlTime 1288654460 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -297,12 +297,12 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288215315 + transient_lastDdlTime 1288654460 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -334,11 +334,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-35-20_630_3517475653926100304/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-34-30_347_456310237656337852/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-35-20_630_3517475653926100304/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-34-30_347_456310237656337852/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ] @@ -541,7 +541,7 @@ TableScan alias: x GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {value} @@ -556,7 +556,7 @@ TableScan alias: y GatherStats: false - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {value} @@ -625,9 +625,9 @@ File Output Operator compressed: false GlobalTableId: 1 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10000/ + Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -637,7 +637,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 numFiles 1 numPartitions 0 @@ -646,7 +646,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 - transient_lastDdlTime 1288215320 + transient_lastDdlTime 1288654470 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -656,9 +656,9 @@ Map Reduce Local Work Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat @@ -672,13 +672,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212591 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -689,13 +689,13 @@ columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcpart + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcpart name srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1288212591 + transient_lastDdlTime 1288649676 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: srcpart name: srcpart @@ -707,14 +707,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002 - destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002 + destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: 
pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10000 + source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -724,7 +724,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 numFiles 1 numPartitions 0 @@ -733,23 +733,23 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 - transient_lastDdlTime 1288215320 + transient_lastDdlTime 1288654470 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 - tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10001 + tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10000 + directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -760,7 +760,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 numFiles 1 numPartitions 0 @@ -769,7 +769,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 - transient_lastDdlTime 1288215320 + transient_lastDdlTime 1288654470 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 TotalFiles: 1 @@ -777,9 +777,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002] + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002 
[pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002] Path -> Partition: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_14-35-31_518_8323624306284645862/-ext-10002 + pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_16-34-46_942_7292486166810487008/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -790,7 +790,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 numFiles 1 numPartitions 0 @@ -799,7 +799,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 - transient_lastDdlTime 1288215320 + transient_lastDdlTime 1288654470 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -810,7 +810,7 @@ columns.types string:string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dest_j1 + location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dest_j1 name dest_j1 numFiles 1 numPartitions 0 @@ -819,7 +819,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 2125 - transient_lastDdlTime 1288215320 + transient_lastDdlTime 1288654470 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: dest_j1 name: dest_j1 @@ -858,11 +858,11 @@ PREHOOK: query: select * from dest_j1 x order by x.key PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-35-38_043_2750643574384770263/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-34-56_739_1357041215207005883/-mr-10000 POSTHOOK: query: select * from dest_j1 x order by x.key POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-35-38_043_2750643574384770263/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-34-56_739_1357041215207005883/-mr-10000 POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1_copy)x.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out (working copy) @@ -23,7 +23,7 @@ src TableScan alias: src - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -70,7 +70,7 @@ src1 TableScan alias: src1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col0} 1 @@ -83,7 +83,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_14-46-32_639_5385690485575219943/-mr-10002 + 
file:/tmp/liyintang/hive_2010-11-01_16-56-02_964_1762503919530503841/-mr-10002 Select Operator expressions: expr: _col0 @@ -151,7 +151,7 @@ src TableScan alias: src - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {ds} 1 @@ -198,7 +198,7 @@ src1 TableScan alias: src1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col2} 1 @@ -211,7 +211,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_14-46-32_758_5972659942965972745/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-56-03_171_1924985780669063174/-mr-10002 Select Operator expressions: expr: _col0 @@ -243,7 +243,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_14-46-32_758_5972659942965972745/-mr-10003 + file:/tmp/liyintang/hive_2010-11-01_16-56-03_171_1924985780669063174/-mr-10003 Select Operator expressions: expr: _col2 @@ -309,7 +309,7 @@ PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-32_877_634052141369406072/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-03_397_2485472400538940992/-mr-10000 POSTHOOK: query: select /*+MAPJOIN(src, src1) */ count(*) from srcpart join src src on (srcpart.value=src.value) join src src1 on (srcpart.key=src1.key) group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@src @@ -317,6 +317,6 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-32_877_634052141369406072/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-03_397_2485472400538940992/-mr-10000 5308 5308 Index: ql/src/test/results/clientpositive/mapjoin_subquery.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_subquery.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/mapjoin_subquery.q.out (working copy) @@ -34,7 +34,7 @@ subq:x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -58,7 +58,7 @@ predicate: expr: (hr = 11) type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {_col0} 1 {value} @@ -145,7 +145,7 @@ PREHOOK: Input: default@src PREHOOK: Input: default@src1 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-44_452_445213667214370288/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-22_114_1117365805466395198/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(z) */ subq.key1, z.value FROM (SELECT /*+ MAPJOIN(x) */ x.key as key1, x.value as value1, y.key as key2, y.value as value2 @@ -155,7 +155,7 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-44_452_445213667214370288/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-22_114_1117365805466395198/-mr-10000 238 val_238 238 val_238 311 val_311 @@ -302,7 +302,7 @@ subq:x TableScan alias: x - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -326,7 +326,7 @@ predicate: expr: (hr = 11) type: boolean - JDBM Sink Operator + Hash Table Sink 
Operator condition expressions: 0 {_col0} 1 {value} @@ -388,7 +388,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_14-46-48_345_6547752928820986559/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_16-56-28_442_7178055512464150906/-mr-10002 Select Operator expressions: expr: _col0 @@ -438,7 +438,7 @@ PREHOOK: Input: default@src PREHOOK: Input: default@src1 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-48_451_5103197118611190766/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-28_639_2527169909396269769/-mr-10000 POSTHOOK: query: SELECT /*+ MAPJOIN(z) */ subq.key1, z.value FROM (SELECT /*+ MAPJOIN(x) */ x.key as key1, x.value as value1, y.key as key2, y.value as value2 @@ -449,7 +449,7 @@ POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_14-46-48_451_5103197118611190766/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_16-56-28_639_2527169909396269769/-mr-10000 128 val_128 128 val_128 128 val_128 Index: ql/src/test/results/clientpositive/select_transform_hint.q.out =================================================================== --- ql/src/test/results/clientpositive/select_transform_hint.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/select_transform_hint.q.out (working copy) @@ -29,7 +29,7 @@ a TableScan alias: a - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 @@ -96,14 +96,14 @@ on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-02-57_632_2812838087751245658/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-31_739_8624380450121078203/-mr-10000 POSTHOOK: query: SELECT /*+MAPJOIN(a)*/ TRANSFORM(a.key, a.value) USING '/bin/cat' AS (tkey, tvalue) FROM src a join src b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-02-57_632_2812838087751245658/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-31_739_8624380450121078203/-mr-10000 238 val_238 238 val_238 86 val_86 @@ -1223,14 +1223,14 @@ on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-01_031_3277101842984747988/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-36_672_7524820665388433673/-mr-10000 POSTHOOK: query: SELECT /*+STREAMTABLE(a)*/ TRANSFORM(a.key, a.value) USING '/bin/cat' AS (tkey, tvalue) FROM src a join src b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-01_031_3277101842984747988/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-36_672_7524820665388433673/-mr-10000 0 val_0 0 val_0 0 val_0 Index: ql/src/test/results/clientpositive/semijoin.q.out =================================================================== --- ql/src/test/results/clientpositive/semijoin.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/semijoin.q.out (working copy) @@ -8,11 +8,11 @@ PREHOOK: query: select * from t1 sort by key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-07_317_2567121123918340747/-mr-10000 +PREHOOK: Output: 
file:/tmp/liyintang/hive_2010-11-01_17-23-47_720_5813891247420611907/-mr-10000 POSTHOOK: query: select * from t1 sort by key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-07_317_2567121123918340747/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-47_720_5813891247420611907/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -34,11 +34,11 @@ PREHOOK: query: select * from t2 sort by key PREHOOK: type: QUERY PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-12_609_8186818336637614021/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-55_916_9080722312611172215/-mr-10000 POSTHOOK: query: select * from t2 sort by key POSTHOOK: type: QUERY POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-12_609_8186818336637614021/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-23-55_916_9080722312611172215/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -62,11 +62,11 @@ PREHOOK: query: select * from t3 sort by key, value PREHOOK: type: QUERY PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-20_412_6858946112655112787/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-08_505_1434415232763062051/-mr-10000 POSTHOOK: query: select * from t3 sort by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-20_412_6858946112655112787/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-08_505_1434415232763062051/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -97,11 +97,11 @@ PREHOOK: query: select * from t4 PREHOOK: type: QUERY PREHOOK: Input: default@t4 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-23_040_6896567563297871765/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-12_522_999983483531130511/-mr-10000 POSTHOOK: query: select * from t4 POSTHOOK: type: QUERY POSTHOOK: Input: default@t4 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-23_040_6896567563297871765/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-12_522_999983483531130511/-mr-10000 PREHOOK: query: explain select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY POSTHOOK: query: explain select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value @@ -185,7 +185,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-23_122_1418801017341736948/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-12_737_2340176705373139925/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -217,12 +217,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-23_188_6557146902672307691/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-12_869_6087511631643524381/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t2 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-23_188_6557146902672307691/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-12_869_6087511631643524381/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -312,7 +312,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - 
file:/tmp/heyongqiang/hive_2010-10-27_15-03-28_893_6723532886712811629/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-20_657_8315209286289513156/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -344,12 +344,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-28_959_3762364132590482286/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-20_781_867633109920817168/-mr-10000 POSTHOOK: query: select * from t2 a left semi join t1 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-28_959_3762364132590482286/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-20_781_867633109920817168/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -441,7 +441,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-34_570_7889009522221032190/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-28_695_4079060681105079761/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -473,12 +473,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t4 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-34_654_5556420496433395304/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-28_818_5686219416196301897/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t4 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t4 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-34_654_5556420496433395304/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-28_818_5686219416196301897/-mr-10000 PREHOOK: query: explain select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY POSTHOOK: query: explain select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value @@ -568,7 +568,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-40_011_3808833152580249248/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-36_441_8195388778093229606/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -596,12 +596,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-40_079_1282941878916492070/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-36_567_1584321099873424343/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join t3 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-40_079_1282941878916492070/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-36_567_1584321099873424343/-mr-10000 val_0 val_0 val_0 @@ -708,7 +708,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-45_730_7298072930043033983/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-47_865_5192679573524185476/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -740,12 +740,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: 
file:/tmp/heyongqiang/hive_2010-10-27_15-03-45_804_5294995386951265093/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-47_994_2119126598619413953/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t2 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-45_804_5294995386951265093/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-47_994_2119126598619413953/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -841,7 +841,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-51_407_427333938274086877/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-24-58_250_4488614591237221514/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -869,12 +869,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-51_486_1315085131198914442/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-58_378_2369717499836318148/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join (select key from t3 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-51_486_1315085131198914442/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-24-58_378_2369717499836318148/-mr-10000 val_10 val_8 val_9 @@ -980,7 +980,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-03-57_031_4644400431266147309/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-07_579_5834616454207362103/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1008,12 +1008,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-57_105_2695825645438377544/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-07_731_8167456529422749731/-mr-10000 POSTHOOK: query: select a.value from t1 a left semi join (select key , value from t2 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-03-57_105_2695825645438377544/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-07_731_8167456529422749731/-mr-10000 PREHOOK: query: explain select * from t2 a left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value PREHOOK: type: QUERY POSTHOOK: query: explain select * from t2 a left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value @@ -1110,7 +1110,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-02_466_725728290759835787/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-17_730_8845266111299456063/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1142,12 +1142,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-02_537_8369733971186993079/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-17_889_7903296525191924528/-mr-10000 POSTHOOK: query: select * from t2 a 
left semi join (select key , value from t1 where key > 2) b on a.key = b.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-02_537_8369733971186993079/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-17_889_7903296525191924528/-mr-10000 4 val_2 8 val_4 10 val_5 @@ -1189,7 +1189,7 @@ type: int mode: hash outputColumnNames: _col0 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -1229,7 +1229,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-08_036_2910952596121389668/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-27_200_3669240580405012273/-mr-10002 Select Operator expressions: expr: _col0 @@ -1267,12 +1267,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-08_109_4473865912549348136/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-27_327_5735980346591255829/-mr-10000 POSTHOOK: query: select /*+ mapjoin(b) */ a.key from t3 a left semi join t1 b on a.key = b.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-08_109_4473865912549348136/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-27_327_5735980346591255829/-mr-10000 0 0 0 @@ -1375,7 +1375,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-13_353_2384479425001200067/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-35_472_2977708279499054243/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1407,12 +1407,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-13_420_5777699289863486696/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-35_598_257473487014815248/-mr-10000 POSTHOOK: query: select * from t1 a left semi join t2 b on a.key = 2*b.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-13_420_5777699289863486696/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-35_598_257473487014815248/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1523,7 +1523,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-18_912_2891401415653997089/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-44_987_2455437829705626204/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1560,13 +1560,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-18_988_5010287233526168835/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-45_128_7321670510599919973/-mr-10000 POSTHOOK: query: select * from t1 a join t2 b on a.key = b.key left semi join t3 c on b.key = c.key sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-18_988_5010287233526168835/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-45_128_7321670510599919973/-mr-10000 0 val_0 0 val_0 0 val_0 0 val_0 0 
val_0 0 val_0 @@ -1676,7 +1676,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-24_617_6131522087162404256/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-25-54_562_5539348387867905196/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -1708,12 +1708,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-24_685_7575816128654858988/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-54_692_1448334472190252284/-mr-10000 POSTHOOK: query: select * from t3 a left semi join t1 b on a.key = b.key and a.value=b.value sort by a.key, a.value POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-24_685_7575816128654858988/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-25-54_692_1448334472190252284/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1767,7 +1767,7 @@ type: int mode: hash outputColumnNames: _col0 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -1793,7 +1793,7 @@ type: int mode: hash outputColumnNames: _col0 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 @@ -1838,7 +1838,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-30_183_6165676837426268350/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-26-02_573_4566864745665130552/-mr-10002 Select Operator expressions: expr: _col0 @@ -1877,13 +1877,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-30_260_2433281365180402598/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-02_723_7508274813116601526/-mr-10000 POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3 a left semi join t1 b on a.key = b.key left semi join t2 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-30_260_2433281365180402598/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-02_723_7508274813116601526/-mr-10000 0 0 0 @@ -1991,7 +1991,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-35_797_2333906634777100148/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-26-12_897_3209145763541196635/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2020,13 +2020,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-35_871_7897675469131546389/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-13_040_1661947073418093026/-mr-10000 POSTHOOK: query: select a.key from t3 a left outer join t1 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-35_871_7897675469131546389/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-13_040_1661947073418093026/-mr-10000 0 0 0 @@ -2146,7 +2146,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-41_608_2220388568134618866/-mr-10002 + 
file:/tmp/liyintang/hive_2010-11-01_17-26-22_373_752075940061133785/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2175,13 +2175,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-41_682_2882263810051097750/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-22_511_7489586349667152619/-mr-10000 POSTHOOK: query: select a.key from t1 a right outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-41_682_2882263810051097750/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-22_511_7489586349667152619/-mr-10000 NULL NULL NULL @@ -2304,7 +2304,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-48_006_8185934732491634852/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-26-34_460_3318364188159488316/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2333,13 +2333,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-48_081_2028444012866190155/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-34_598_1107996313738604442/-mr-10000 POSTHOOK: query: select a.key from t1 a full outer join t3 b on a.key = b.key left semi join t2 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-48_081_2028444012866190155/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-34_598_1107996313738604442/-mr-10000 NULL NULL NULL @@ -2462,7 +2462,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-53_653_9010857136935696124/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-26-43_944_3107902523928327694/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2491,13 +2491,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-53_728_7613723015009146650/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-44_097_7033015682413583193/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-53_728_7613723015009146650/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-44_097_7033015682413583193/-mr-10000 0 0 0 @@ -2620,7 +2620,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-04-59_367_1281949091297790008/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-26-53_398_925204448264973815/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2649,13 +2649,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-59_442_8906799826368408100/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-53_536_6435582090149919411/-mr-10000 POSTHOOK: 
query: select a.key from t3 a left semi join t2 b on a.key = b.key right outer join t1 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-04-59_442_8906799826368408100/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-26-53_536_6435582090149919411/-mr-10000 NULL NULL NULL @@ -2780,7 +2780,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-05-08_027_9057656913862672973/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-27-03_962_9136088786460782033/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -2809,13 +2809,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-05-08_102_1904204088263319648/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-27-04_100_6495562209758724756/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t1 b on a.key = b.key full outer join t2 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-05-08_102_1904204088263319648/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-27-04_100_6495562209758724756/-mr-10000 NULL NULL NULL @@ -2984,7 +2984,7 @@ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-05-13_732_1908006251522430032/-mr-10003 + file:/tmp/liyintang/hive_2010-11-01_17-27-13_399_6289350806081061373/-mr-10003 Reduce Output Operator key expressions: expr: _col0 @@ -3013,13 +3013,13 @@ PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-05-13_810_2454226698258102980/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-27-13_542_925251409717229498/-mr-10000 POSTHOOK: query: select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.value = c.value sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-05-13_810_2454226698258102980/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-27-13_542_925251409717229498/-mr-10000 0 0 0 Index: ql/src/test/results/clientpositive/skewjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/skewjoin.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/skewjoin.q.out (working copy) @@ -139,7 +139,7 @@ limit: -1 Alias -> Map Local Operator Tree: 1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {0_VALUE_0} 1 {1_VALUE_0} @@ -219,11 +219,11 @@ PREHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 PREHOOK: type: QUERY PREHOOK: Input: default@dest_j1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-19_102_8796456594111978797/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-30-04_167_500946518178798703/-mr-10000 POSTHOOK: query: SELECT sum(hash(key)), sum(hash(value)) FROM dest_j1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-19_102_8796456594111978797/-mr-10000 +POSTHOOK: Output: 
file:/tmp/liyintang/hive_2010-11-01_17-30-04_167_500946518178798703/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 278697 101852390308 @@ -373,7 +373,7 @@ PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 PREHOOK: Input: default@t4 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-35_021_282318098948564636/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-30-55_117_7907817004764400150/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key @@ -383,7 +383,7 @@ POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 POSTHOOK: Input: default@t4 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-35_021_282318098948564636/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-30-55_117_7907817004764400150/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 2 12 2 22 2 12 2 12 @@ -533,7 +533,7 @@ PREHOOK: Input: default@t2 PREHOOK: Input: default@t3 PREHOOK: Input: default@t4 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-38_270_4680889030113901410/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-01_441_1745801074574810830/-mr-10000 POSTHOOK: query: SELECT /*+ STREAMTABLE(a,c) */ * FROM T1 a JOIN T2 b ON a.key = b.key JOIN T3 c ON b.key = c.key @@ -543,7 +543,7 @@ POSTHOOK: Input: default@t2 POSTHOOK: Input: default@t3 POSTHOOK: Input: default@t4 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-38_270_4680889030113901410/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-01_441_1745801074574810830/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 2 12 2 22 2 12 2 12 @@ -633,7 +633,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-06-41_304_411032267288143321/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-31-07_599_2512978463224495744/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -678,12 +678,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-41_378_6980884094838374214/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-07_731_345177418593757365/-mr-10000 POSTHOOK: query: FROM T1 a JOIN src c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-41_378_6980884094838374214/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-07_731_345177418593757365/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 198 6274 194 @@ -800,7 +800,7 @@ limit: -1 Alias -> Map Local Operator Tree: 1 - JDBM Sink Operator + Hash Table Sink Operator condition 
expressions: 0 1 {1_VALUE_0} {1_VALUE_1} @@ -852,7 +852,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-06-47_083_7583033798461542975/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-31-16_611_9163129478933432249/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -896,7 +896,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-47_255_6693309924800490257/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-16_891_3977031664878001305/-mr-10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -905,7 +905,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-06-47_255_6693309924800490257/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-31-16_891_3977031664878001305/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 44481300 101852390308 @@ -1032,7 +1032,7 @@ limit: -1 Alias -> Map Local Operator Tree: 1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 1 {1_VALUE_0} {1_VALUE_1} @@ -1084,7 +1084,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-07-15_195_639976818225884535/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-32-28_863_2988762902545824975/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -1128,7 +1128,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-07-15_370_1979544668421346379/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-32-29_163_1008116551230331591/-mr-10000 POSTHOOK: query: FROM (SELECT src.* FROM src) x JOIN @@ -1137,7 +1137,7 @@ SELECT sum(hash(Y.key)), sum(hash(Y.value)) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-07-15_370_1979544668421346379/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-32-29_163_1008116551230331591/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] NULL NULL @@ -1303,7 +1303,7 @@ limit: -1 Alias -> Map Local Operator Tree: 1 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {0_VALUE_0} 1 {1_VALUE_0} @@ -1315,7 +1315,7 @@ 2 [Column[joinkey0]] Position of Big Table: 0 2 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {0_VALUE_0} 1 {1_VALUE_0} @@ -1372,7 +1372,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-07-40_939_791224492830751906/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-33-37_575_4275624209381623160/-mr-10002 Reduce Output Operator sort order: tag: -1 @@ -1414,7 +1414,7 @@ limit: -1 Alias -> Map Local Operator Tree: 0 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {0_VALUE_0} 1 {1_VALUE_0} @@ -1426,7 +1426,7 @@ 2 [Column[joinkey0]] Position of Big Table: 1 2 - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {0_VALUE_0} 1 {1_VALUE_0} @@ -1496,7 
+1496,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-07-41_289_4528597985146531444/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-33-38_153_3676041644693835594/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.c1)), sum(hash(src2.c4)) FROM (SELECT src.key as c1, src.value as c2 from src) src1 @@ -1508,7 +1508,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-07-41_289_4528597985146531444/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-33-38_153_3676041644693835594/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 293143 -136853010385 @@ -1540,7 +1540,7 @@ v TableScan alias: v - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} 1 {val} @@ -1580,7 +1580,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/heyongqiang/hive_2010-10-27_15-08-11_023_4952961378451488185/-mr-10002 + file:/tmp/liyintang/hive_2010-11-01_17-35-12_015_5578666091441415300/-mr-10002 Select Operator expressions: expr: _col0 @@ -1640,55 +1640,55 @@ PREHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-11_093_7547555083303240518/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-12_145_4763018761752437191/-mr-10000 POSTHOOK: query: SELECT /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) FROM T1 k LEFT OUTER JOIN T1 v ON k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-11_093_7547555083303240518/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-12_145_4763018761752437191/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 372 6320 PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-16_379_6728265708807350086/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-20_379_5057348908779826938/-mr-10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.val POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-16_379_6728265708807350086/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-20_379_5057348908779826938/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] NULL NULL PREHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: 
file:/tmp/heyongqiang/hive_2010-10-27_15-08-21_607_6541620241735835708/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-28_506_1425032574320408927/-mr-10000 POSTHOOK: query: select /*+ mapjoin(k)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-21_607_6541620241735835708/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-28_506_1425032574320408927/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 429 12643 PREHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-27_482_5011839329231174267/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-37_791_7645010452752190198/-mr-10000 POSTHOOK: query: select sum(hash(k.key)), sum(hash(v.val)) from T1 k join T1 v on k.key=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-27_482_5011839329231174267/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-37_791_7645010452752190198/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 429 12643 PREHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-35_146_3336063612709764146/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-52_548_6822795245377377433/-mr-10000 POSTHOOK: query: select count(1) from T1 a join T1 b on a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-35_146_3336063612709764146/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-35-52_548_6822795245377377433/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 8 @@ -1696,12 +1696,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-42_859_1470660249312294617/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-05_320_3171317965917500669/-mr-10000 POSTHOOK: query: FROM T1 a LEFT OUTER JOIN T2 c ON c.key+1=a.key SELECT sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-42_859_1470660249312294617/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-05_320_3171317965917500669/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 317 9462 50 @@ -1709,12 +1709,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: 
Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-48_569_1389223424240589811/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-15_203_3570805405369150718/-mr-10000 POSTHOOK: query: FROM T1 a RIGHT OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-48_569_1389223424240589811/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-15_203_3570805405369150718/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 51 1570 318 @@ -1722,12 +1722,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-53_884_193002294179789118/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-24_451_7645343040963409809/-mr-10000 POSTHOOK: query: FROM T1 a FULL OUTER JOIN T2 c ON c.key+1=a.key SELECT /*+ STREAMTABLE(a) */ sum(hash(a.key)), sum(hash(a.val)), sum(hash(c.key)) POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-53_884_193002294179789118/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-24_451_7645343040963409809/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 317 9462 318 @@ -1735,12 +1735,12 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-59_493_4647924292571801277/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-33_707_7322297395017895599/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 LEFT OUTER JOIN T2 src2 ON src1.key+1 = src2.key RIGHT OUTER JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-08-59_493_4647924292571801277/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-33_707_7322297395017895599/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 370 11003 377 @@ -1748,23 +1748,23 @@ PREHOOK: type: QUERY PREHOOK: Input: default@t1 PREHOOK: Input: default@t2 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-09-04_974_5598263675862728012/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-42_920_2294735429138060300/-mr-10000 POSTHOOK: query: SELECT sum(hash(src1.key)), sum(hash(src1.val)), sum(hash(src2.key)) FROM T1 src1 JOIN T2 src2 ON src1.key+1 = src2.key JOIN T2 src3 ON src2.key = src3.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 POSTHOOK: Input: default@t2 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-09-04_974_5598263675862728012/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-36-42_920_2294735429138060300/-mr-10000 POSTHOOK: Lineage: dest_j1.key 
EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 370 11003 377 PREHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key PREHOOK: type: QUERY PREHOOK: Input: default@t1 -PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-09-15_753_4549651274151249293/-mr-10000 +PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-37-02_933_5574498172753792291/-mr-10000 POSTHOOK: query: select /*+ mapjoin(v)*/ sum(hash(k.key)), sum(hash(v.val)) from T1 k left outer join T1 v on k.key+1=v.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1 -POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-09-15_753_4549651274151249293/-mr-10000 +POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-37-02_933_5574498172753792291/-mr-10000 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ] 372 6320 Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1029845) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -109,7 +109,7 @@ predicate: expr: (ds = '2008-04-08') type: boolean - JDBM Sink Operator + Hash Table Sink Operator condition expressions: 0 {key} {value} 1 {value} {ds} @@ -122,10 +122,10 @@ Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket21.txt=[srcbucket21.txt, srcbucket23.txt]} Alias Bucket File Name Mapping: - b {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} + b {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]} Alias Bucket Output File Name Mapping: - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0 - pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1 + 
pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt 0
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt 1

 Stage: Stage-1
 Map Reduce
@@ -174,9 +174,9 @@
 File Output Operator
 compressed: false
 GlobalTableId: 1
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002
 NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10000/
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -186,12 +186,12 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217681
+ transient_lastDdlTime 1288658786
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 TotalFiles: 1
@@ -201,9 +201,9 @@
 Map Reduce Local Work
 Needs Tagging: false
 Path -> Alias:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
 Path -> Partition:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin
 Partition
 base file name: srcbucket_mapjoin
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -215,12 +215,12 @@
 columns.types int:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin
 name srcbucket_mapjoin
 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217678
+ transient_lastDdlTime 1288658776
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -232,12 +232,12 @@
 columns.types int:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin
 name srcbucket_mapjoin
 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217678
+ transient_lastDdlTime 1288658776
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: srcbucket_mapjoin
 name: srcbucket_mapjoin
@@ -249,14 +249,14 @@
 Move Operator
 files:
 hdfs directory: true
- source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002
- destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10000
+ source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002
+ destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10000

 Stage: Stage-0
 Move Operator
 tables:
 replace: true
- source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10000
+ source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10000
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -266,28 +266,28 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217681
+ transient_lastDdlTime 1288658786
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
- tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10001
+ tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10001

 Stage: Stage-2
 Stats-Aggr Operator
- Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10000/
+ Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10000/

 Stage: Stage-3
 Map Reduce
 Alias -> Map Operator Tree:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002
 File Output Operator
 compressed: false
 GlobalTableId: 0
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10000
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10000
 NumFilesPerFileSink: 1
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -298,12 +298,12 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217681
+ transient_lastDdlTime 1288658786
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 TotalFiles: 1
@@ -311,9 +311,9 @@
 MultiFileSpray: false
 Needs Tagging: false
 Path -> Alias:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002]
 Path -> Partition:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-14-41_786_1818524795371776922/-ext-10002
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-46-26_874_6100058649459904137/-ext-10002
 Partition
 base file name: -ext-10002
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -324,12 +324,12 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217681
+ transient_lastDdlTime 1288658786
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -340,12 +340,12 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217681
+ transient_lastDdlTime 1288658786
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 name: bucketmapjoin_tmp_result
@@ -373,11 +373,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-14-50_095_3921503802679188237/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-46-42_094_2589522318901851300/-mr-10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-14-50_095_3921503802679188237/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-46-42_094_2589522318901851300/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.key SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value1 SIMPLE [(srcbucket_mapjoin)a.FieldSchema(name:value, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_tmp_result.value2 SIMPLE [(srcbucket_mapjoin_part)b.FieldSchema(name:value, type:string, comment:null), ]
@@ -426,11 +426,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-09_956_1467526469974049835/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-06_353_2042879204572636231/-mr-10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-09_956_1467526469974049835/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-06_353_2042879204572636231/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
@@ -469,14 +469,14 @@
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_hash_result_1
 PREHOOK: Input: default@bucketmapjoin_hash_result_2
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-16_402_1274526735492717452/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-17_430_6320028914073363150/-mr-10000
 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
 on a.key = b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_hash_result_1
 POSTHOOK: Input: default@bucketmapjoin_hash_result_2
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-16_402_1274526735492717452/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-17_430_6320028914073363150/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
@@ -538,7 +538,7 @@
 TableScan
 alias: a
 GatherStats: false
- JDBM Sink Operator
+ Hash Table Sink Operator
 condition expressions:
 0 {key} {value}
 1 {value} {ds}
@@ -551,12 +551,12 @@
 Alias Bucket Base File Name Mapping:
 a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt]}
 Alias Bucket File Name Mapping:
- a {pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt], pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
 Alias Bucket Output File Name Mapping:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3

 Stage: Stage-1
 Map Reduce
@@ -610,9 +610,9 @@
 File Output Operator
 compressed: false
 GlobalTableId: 1
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002
 NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10000/
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -622,7 +622,7 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 numFiles 1
 numPartitions 0
@@ -631,7 +631,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 8983
- transient_lastDdlTime 1288217709
+ transient_lastDdlTime 1288658826
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 TotalFiles: 1
@@ -641,9 +641,9 @@
 Map Reduce Local Work
 Needs Tagging: false
 Path -> Alias:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
 Path -> Partition:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
 Partition
 base file name: ds=2008-04-08
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -657,13 +657,13 @@
 columns.types int:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part
 name srcbucket_mapjoin_part
 partition_columns ds
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217678
+ transient_lastDdlTime 1288658776
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -675,13 +675,13 @@
 columns.types int:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/srcbucket_mapjoin_part
 name srcbucket_mapjoin_part
 partition_columns ds
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1288217678
+ transient_lastDdlTime 1288658776
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: srcbucket_mapjoin_part
 name: srcbucket_mapjoin_part
@@ -693,14 +693,14 @@
 Move Operator
 files:
 hdfs directory: true
- source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002
- destination: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10000
+ source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002
+ destination: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10000

 Stage: Stage-0
 Move Operator
 tables:
 replace: true
- source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10000
+ source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10000
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -710,7 +710,7 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 numFiles 1
 numPartitions 0
@@ -719,23 +719,23 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 8983
- transient_lastDdlTime 1288217709
+ transient_lastDdlTime 1288658826
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
- tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10001
+ tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10001

 Stage: Stage-2
 Stats-Aggr Operator
- Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10000/
+ Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10000/

 Stage: Stage-3
 Map Reduce
 Alias -> Map Operator Tree:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002
 File Output Operator
 compressed: false
 GlobalTableId: 0
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10000
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10000
 NumFilesPerFileSink: 1
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -746,7 +746,7 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 numFiles 1
 numPartitions 0
@@ -755,7 +755,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 8983
- transient_lastDdlTime 1288217709
+ transient_lastDdlTime 1288658826
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 TotalFiles: 1
@@ -763,9 +763,9 @@
 MultiFileSpray: false
 Needs Tagging: false
 Path -> Alias:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002 [pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002 [pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002]
 Path -> Partition:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-15-19_194_3473668854136624368/-ext-10002
+ pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_17-47-23_195_4053056800816209473/-ext-10002
 Partition
 base file name: -ext-10002
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -776,7 +776,7 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 numFiles 1
 numPartitions 0
@@ -785,7 +785,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 8983
- transient_lastDdlTime 1288217709
+ transient_lastDdlTime 1288658826
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -796,7 +796,7 @@
 columns.types string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
 name bucketmapjoin_tmp_result
 numFiles 1
 numPartitions 0
@@ -805,7 +805,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 8983
- transient_lastDdlTime 1288217709
+ transient_lastDdlTime 1288658826
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: bucketmapjoin_tmp_result
 name: bucketmapjoin_tmp_result
@@ -845,11 +845,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-27_883_5821584311616966454/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-41_006_5971283724594800541/-mr-10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-27_883_5821584311616966454/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-47-41_006_5971283724594800541/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value2 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value2, type:string, comment:null), ]
@@ -934,11 +934,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-42_761_6730929785597192047/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-48-09_034_8242927422993813931/-mr-10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-42_761_6730929785597192047/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-48-09_034_8242927422993813931/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
@@ -1001,14 +1001,14 @@
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_hash_result_1
 PREHOOK: Input: default@bucketmapjoin_hash_result_2
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-49_261_7311186777068701827/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-48-20_189_4801003270655643439/-mr-10000
 POSTHOOK: query: select a.key-b.key, a.value1-b.value1, a.value2-b.value2
 from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
 on a.key = b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_hash_result_1
 POSTHOOK: Input: default@bucketmapjoin_hash_result_2
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-15-49_261_7311186777068701827/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_17-48-20_189_4801003270655643439/-mr-10000
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.key EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:key, type:string, comment:null), ]
 POSTHOOK: Lineage: bucketmapjoin_hash_result_1.value1 EXPRESSION [(bucketmapjoin_tmp_result)bucketmapjoin_tmp_result.FieldSchema(name:value1, type:string, comment:null), ]
Index: ql/src/test/results/clientpositive/union22.q.out
===================================================================
--- ql/src/test/results/clientpositive/union22.q.out	(revision 1029845)
+++ ql/src/test/results/clientpositive/union22.q.out	(working copy)
@@ -119,7 +119,7 @@
 expr: k4
 type: string
 outputColumnNames: _col1, _col3, _col4
- JDBM Sink Operator
+ Hash Table Sink Operator
 condition expressions:
 0 {k1} {k2}
 1 {_col3} {_col4}
@@ -162,7 +162,7 @@
 File Output Operator
 compressed: false
 GlobalTableId: 0
- directory: file:/tmp/heyongqiang/hive_2010-10-27_15-39-47_471_6619420927562677446/-mr-10002
+ directory: file:/tmp/liyintang/hive_2010-11-01_18-26-12_614_1768655273205245726/-mr-10002
 NumFilesPerFileSink: 1
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -178,9 +178,9 @@
 Map Reduce Local Work
 Needs Tagging: false
 Path -> Alias:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1 [null-subquery2:subq-subquery2:a]
 Path -> Partition:
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22/ds=1
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22/ds=1
 Partition
 base file name: ds=1
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -193,7 +193,7 @@
 columns.types string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22
 name dst_union22
 numFiles 1
 numPartitions 1
@@ -203,7 +203,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 11624
- transient_lastDdlTime 1288219182
+ transient_lastDdlTime 1288661166
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -214,7 +214,7 @@
 columns.types string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22
 name dst_union22
 numFiles 1
 numPartitions 1
@@ -224,7 +224,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 11624
- transient_lastDdlTime 1288219182
+ transient_lastDdlTime 1288661166
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: dst_union22
 name: dst_union22
@@ -232,7 +232,7 @@
 Stage: Stage-3
 Map Reduce
 Alias -> Map Operator Tree:
- file:/tmp/heyongqiang/hive_2010-10-27_15-39-47_471_6619420927562677446/-mr-10002
+ file:/tmp/liyintang/hive_2010-11-01_18-26-12_614_1768655273205245726/-mr-10002
 Select Operator
 expressions:
 expr: _col0
@@ -275,10 +275,10 @@
 File Output Operator
 compressed: false
 GlobalTableId: 1
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000
 NumFilesPerFileSink: 1
 Static Partition Specification: ds=2/
- Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000/
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -288,7 +288,7 @@
 columns.types string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22
 name dst_union22
 numFiles 1
 numPartitions 1
@@ -298,7 +298,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 11624
- transient_lastDdlTime 1288219182
+ transient_lastDdlTime 1288661166
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: dst_union22
 TotalFiles: 1
@@ -344,10 +344,10 @@
 File Output Operator
 compressed: false
 GlobalTableId: 1
- directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000
+ directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000
 NumFilesPerFileSink: 1
 Static Partition Specification: ds=2/
- Stats Publishing Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000/
+ Stats Publishing Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000/
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -357,7 +357,7 @@
 columns.types string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22
 name dst_union22
 numFiles 1
 numPartitions 1
@@ -367,7 +367,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 11624
- transient_lastDdlTime 1288219182
+ transient_lastDdlTime 1288661166
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: dst_union22
 TotalFiles: 1
@@ -375,10 +375,10 @@
 MultiFileSpray: false
 Needs Tagging: false
 Path -> Alias:
- file:/tmp/heyongqiang/hive_2010-10-27_15-39-47_471_6619420927562677446/-mr-10002 [file:/tmp/heyongqiang/hive_2010-10-27_15-39-47_471_6619420927562677446/-mr-10002]
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
+ file:/tmp/liyintang/hive_2010-11-01_18-26-12_614_1768655273205245726/-mr-10002 [file:/tmp/liyintang/hive_2010-11-01_18-26-12_614_1768655273205245726/-mr-10002]
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1 [null-subquery1:subq-subquery1:dst_union22_delta]
 Path -> Partition:
- file:/tmp/heyongqiang/hive_2010-10-27_15-39-47_471_6619420927562677446/-mr-10002
+ file:/tmp/liyintang/hive_2010-11-01_18-26-12_614_1768655273205245726/-mr-10002
 Partition
 base file name: -mr-10002
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -394,7 +394,7 @@
 columns _col0,_col1,_col10,_col11
 columns.types string,string,string,string
 escape.delim \
- pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22_delta/ds=1
+ pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta/ds=1
 Partition
 base file name: ds=1
 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -407,7 +407,7 @@
 columns.types string:string:string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22_delta
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta
 name dst_union22_delta
 numFiles 1
 numPartitions 1
@@ -417,7 +417,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 17436
- transient_lastDdlTime 1288219187
+ transient_lastDdlTime 1288661172
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

 input format: org.apache.hadoop.mapred.TextInputFormat
@@ -428,7 +428,7 @@
 columns.types string:string:string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22_delta
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22_delta
 name dst_union22_delta
 numFiles 1
 numPartitions 1
@@ -438,7 +438,7 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 17436
- transient_lastDdlTime 1288219187
+ transient_lastDdlTime 1288661172
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: dst_union22_delta
 name: dst_union22_delta
@@ -449,7 +449,7 @@
 partition:
 ds 2
 replace: true
- source: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000
+ source: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -459,7 +459,7 @@
 columns.types string:string:string:string
 file.inputformat org.apache.hadoop.mapred.TextInputFormat
 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/heyongqiang/hive-indexing/build/ql/test/data/warehouse/dst_union22
+ location pfile:/data/users/liyintang/hive-trunk/build/ql/test/data/warehouse/dst_union22
 name dst_union22
 numFiles 1
 numPartitions 1
@@ -469,14 +469,14 @@
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 totalSize 11624
- transient_lastDdlTime 1288219182
+ transient_lastDdlTime 1288661166
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: dst_union22
- tmp directory: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10001
+ tmp directory: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10001

 Stage: Stage-4
 Stats-Aggr Operator
- Stats Aggregation Key Prefix: pfile:/data/users/heyongqiang/hive-indexing/build/ql/scratchdir/hive_2010-10-27_15-39-47_471_6619420927562677446/-ext-10000/
+ Stats Aggregation Key Prefix: pfile:/data/users/liyintang/hive-trunk/build/ql/scratchdir/hive_2010-11-01_18-26-12_614_1768655273205245726/-ext-10000/

 PREHOOK: query: insert overwrite table dst_union22 partition (ds='2')
 select * from
@@ -525,11 +525,11 @@
 PREHOOK: query: select * from dst_union22 where ds = '2' order by k1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dst_union22@ds=2
-PREHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-39-55_296_3770070825629131400/-mr-10000
+PREHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_18-26-26_922_3120188372893314397/-mr-10000
 POSTHOOK: query: select * from dst_union22 where ds = '2' order by k1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dst_union22@ds=2
-POSTHOOK: Output: file:/tmp/heyongqiang/hive_2010-10-27_15-39-55_296_3770070825629131400/-mr-10000
+POSTHOOK: Output: file:/tmp/liyintang/hive_2010-11-01_18-26-26_922_3120188372893314397/-mr-10000
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dst_union22 PARTITION(ds=1).k3 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]