diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 7019f4c..efe827c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -60,8 +60,10 @@
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
 import org.apache.hadoop.hive.common.ObjectPair;
@@ -3276,7 +3278,15 @@ public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf,
 
               final Path destFile = new Path(destf, srcStatus.getPath().getName());
               if (null == pool) {
-                if(!destFs.rename(srcStatus.getPath(), destFile)) {
+                boolean success = false;
+                if (destFs instanceof DistributedFileSystem) {
+                  ((DistributedFileSystem) destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
+                  success = true;
+                } else {
+                  destFs.delete(destFile, false);
+                  success = destFs.rename(srcStatus.getPath(), destFile);
+                }
+                if (!success) {
                   throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest:"
                       + destf + " returned false");
                 }
@@ -3285,8 +3295,15 @@ public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf,
                   @Override
                   public Void call() throws Exception {
                     SessionState.setCurrentSessionState(parentSession);
-                    final String group = srcStatus.getGroup();
-                    if(!destFs.rename(srcStatus.getPath(), destFile)) {
+                    boolean success = false;
+                    if (destFs instanceof DistributedFileSystem) {
+                      ((DistributedFileSystem) destFs).rename(srcStatus.getPath(), destFile, Options.Rename.OVERWRITE);
+                      success = true;
+                    } else {
+                      destFs.delete(destFile, false);
+                      success = destFs.rename(srcStatus.getPath(), destFile);
+                    }
+                    if (!success) {
                       throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest path:"
                           + destFile + " returned false");
                     }
diff --git ql/src/test/queries/clientpositive/insert_after_drop_partition.q ql/src/test/queries/clientpositive/insert_after_drop_partition.q
new file mode 100644
index 0000000..9de2495
--- /dev/null
+++ ql/src/test/queries/clientpositive/insert_after_drop_partition.q
@@ -0,0 +1,11 @@
+
+-- set hive.exec.stagingdir=${system:test.tmp.dir}/staging-dir;
+-- when we use default relative location of `hive.exec.stagingdir`
+
+create external table insert_after_drop_partition(key string, val string) partitioned by (insertdate string);
+
+from src insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select *;
+
+alter table insert_after_drop_partition drop partition (insertdate='2008-01-01');
+
+insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select * from src limit 10;
\ No newline at end of file
diff --git ql/src/test/results/clientpositive/insert_after_drop_partition.q.out ql/src/test/results/clientpositive/insert_after_drop_partition.q.out
new file mode 100644
index 0000000..8d84992
--- /dev/null
+++ ql/src/test/results/clientpositive/insert_after_drop_partition.q.out
@@ -0,0 +1,36 @@
+PREHOOK: query: create external table insert_after_drop_partition(key string, val string) partitioned by (insertdate string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_after_drop_partition
+POSTHOOK: query: create external table insert_after_drop_partition(key string, val string) partitioned by (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_after_drop_partition
+PREHOOK: query: from src insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+POSTHOOK: query: from src insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+POSTHOOK: Lineage: insert_after_drop_partition PARTITION(insertdate=2008-01-01).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_after_drop_partition PARTITION(insertdate=2008-01-01).val SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: alter table insert_after_drop_partition drop partition (insertdate='2008-01-01')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@insert_after_drop_partition
+PREHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+POSTHOOK: query: alter table insert_after_drop_partition drop partition (insertdate='2008-01-01')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@insert_after_drop_partition
+POSTHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+PREHOOK: query: insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select * from src limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+POSTHOOK: query: insert overwrite table insert_after_drop_partition partition (insertdate='2008-01-01') select * from src limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_after_drop_partition@insertdate=2008-01-01
+POSTHOOK: Lineage: insert_after_drop_partition PARTITION(insertdate=2008-01-01).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_after_drop_partition PARTITION(insertdate=2008-01-01).val SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
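For readers skimming the patch: both Hive.java hunks apply the same overwrite-aware rename, once in the serial branch of moveFile() and once in the thread-pool branch. The sketch below restates that pattern as a standalone helper, assuming only plain Hadoop FileSystem/DistributedFileSystem APIs; the class name RenameOverwriteSketch and the method renameWithOverwrite are illustrative and do not appear in the patch.

// Sketch only: restates the rename pattern used by the Hive.java hunks above.
// RenameOverwriteSketch and renameWithOverwrite are hypothetical names.
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public final class RenameOverwriteSketch {

  private RenameOverwriteSketch() {
  }

  // Moves src onto dst, replacing dst if a file with that name already exists
  // (e.g. data files left behind after an external table's partition was dropped).
  static void renameWithOverwrite(FileSystem destFs, Path src, Path dst) throws IOException {
    if (destFs instanceof DistributedFileSystem) {
      // HDFS rename with OVERWRITE replaces an existing destination and throws
      // IOException on failure instead of returning false.
      ((DistributedFileSystem) destFs).rename(src, dst, Options.Rename.OVERWRITE);
    } else {
      // Other filesystems: best-effort delete so the plain rename does not
      // refuse to clobber an existing destination, then check its result.
      destFs.delete(dst, false);
      if (!destFs.rename(src, dst)) {
        throw new IOException("rename for src path: " + src + " to dest: " + dst + " returned false");
      }
    }
  }
}

On HDFS, the Options.Rename.OVERWRITE variant signals failure by throwing rather than by returning false, which is why the patch can set success = true unconditionally after the call; the delete-then-rename fallback keeps the original boolean-check behaviour for non-HDFS filesystems.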