Index: ql/src/test/results/clientpositive/load_overwrite.q.out
===================================================================
--- ql/src/test/results/clientpositive/load_overwrite.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/load_overwrite.q.out	(revision 0)
@@ -0,0 +1,125 @@
+PREHOOK: query: create table load_overwrite like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table load_overwrite like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@load_overwrite
+PREHOOK: query: insert overwrite table load_overwrite select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@load_overwrite
+POSTHOOK: query: insert overwrite table load_overwrite select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@load_overwrite
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show table extended like load_overwrite
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like load_overwrite
+POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+tableName:load_overwrite
+owner:null
+location:pfile:/data/users/nzhang/work/2/apache-hive/build/ql/test/data/warehouse/load_overwrite
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { string key, string value}
+partitioned:false
+partitionColumns:
+totalNumberFiles:1
+totalFileSize:5812
+maxFileSize:5812
+minFileSize:5812
+lastAccessTime:0
+lastUpdateTime:1292465082000
+
+PREHOOK: query: select count(*) from load_overwrite
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load_overwrite
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-43_366_2197078026513947066/-mr-10000
+POSTHOOK: query: select count(*) from load_overwrite
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load_overwrite
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-43_366_2197078026513947066/-mr-10000
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+500
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table load_overwrite
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@load_overwrite
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show table extended like load_overwrite
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like load_overwrite
+POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+tableName:load_overwrite
+owner:null
+location:pfile:/data/users/nzhang/work/2/apache-hive/build/ql/test/data/warehouse/load_overwrite
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { string key, string value}
+partitioned:false
+partitionColumns:
+totalNumberFiles:2
+totalFileSize:11624
+maxFileSize:5812
+minFileSize:5812
+lastAccessTime:0
+lastUpdateTime:1292465086000
+
+PREHOOK: query: select count(*) from load_overwrite
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load_overwrite
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-46_973_8511923253102164649/-mr-10000
+POSTHOOK: query: select count(*) from load_overwrite
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load_overwrite
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-46_973_8511923253102164649/-mr-10000
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+1000
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@load_overwrite
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show table extended like load_overwrite
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like load_overwrite
+POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+tableName:load_overwrite
+owner:null
+location:pfile:/data/users/nzhang/work/2/apache-hive/build/ql/test/data/warehouse/load_overwrite
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { string key, string value}
+partitioned:false
+partitionColumns:
+totalNumberFiles:1
+totalFileSize:5812
+maxFileSize:5812
+minFileSize:5812
+lastAccessTime:0
+lastUpdateTime:1292465090000
+
+PREHOOK: query: select count(*) from load_overwrite
+PREHOOK: type: QUERY
+PREHOOK: Input: default@load_overwrite
+PREHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-50_537_1323277203199221916/-mr-10000
+POSTHOOK: query: select count(*) from load_overwrite
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@load_overwrite
+POSTHOOK: Output: file:/tmp/nzhang/hive_2010-12-15_18-04-50_537_1323277203199221916/-mr-10000
+POSTHOOK: Lineage: load_overwrite.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: load_overwrite.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+500
Index: ql/src/test/queries/clientpositive/load_overwrite.q
===================================================================
--- ql/src/test/queries/clientpositive/load_overwrite.q	(revision 0)
+++ ql/src/test/queries/clientpositive/load_overwrite.q	(revision 0)
@@ -0,0 +1,15 @@
+create table load_overwrite like src;
+
+insert overwrite table load_overwrite select * from src;
+show table extended like load_overwrite;
+select count(*) from load_overwrite;
+
+
+load data local inpath '../data/files/kv1.txt' into table load_overwrite;
+show table extended like load_overwrite;
+select count(*) from load_overwrite;
+
+
+load data local inpath '../data/files/kv1.txt' overwrite into table load_overwrite;
+show table extended like load_overwrite;
+select count(*) from load_overwrite;
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(revision 1049722)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(working copy)
@@ -1545,13 +1545,9 @@
     try {
       fs.mkdirs(tmppath);
       for (FileStatus src : srcs) {
-        FileStatus[] items = fs.listStatus(src.getPath());
-        for (int j = 0; j < items.length; j++) {
-          if (!fs.rename(items[j].getPath(), new Path(tmppath, items[j]
-              .getPath().getName()))) {
-            throw new HiveException("Error moving: " + items[j].getPath()
+        if (!fs.rename(src.getPath(), tmppath)) {
+          throw new HiveException("Error moving: " + src.getPath()
               + " into: " + tmppath);
-          }
         }
       }
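
Note on the Hive.java hunk: the loop in replaceFiles now moves each source path into the scratch directory with a single FileSystem.rename call, instead of listing the source and renaming every contained file one by one. Below is a minimal standalone sketch of that simplified loop, written against the public Hadoop FileSystem API; the class and method names are placeholders (not part of the patch), and plain IOException stands in for Hive's HiveException so the snippet compiles on its own.

    import java.io.IOException;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ReplaceFilesSketch {

      // Illustrative only: mirrors the simplified loop in the patch above,
      // issuing one rename per source path rather than one rename per
      // contained file. IOException replaces HiveException here.
      static void moveSourcesIntoTmp(FileSystem fs, FileStatus[] srcs, Path tmppath)
          throws IOException {
        fs.mkdirs(tmppath);                       // ensure the scratch dir exists
        for (FileStatus src : srcs) {
          if (!fs.rename(src.getPath(), tmppath)) {
            throw new IOException("Error moving: " + src.getPath()
                + " into: " + tmppath);
          }
        }
      }
    }

The new test output exercises this end to end: the table reports 1 file / 500 rows after the insert, 2 files / 1000 rows after the plain load, and back to 1 file / 500 rows after load data ... overwrite.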