diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
index 8b63e4dfbde15e0525bc7ea5ede96defc71a67b2..f28edc66ea4644c5847ee6abe2e26306f9fbb43e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
@@ -22,7 +22,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -645,6 +645,40 @@ public void testExim() throws Exception {
     }
   }
 
+  /**
+   * Tests that the table and partition permissions are preserved after truncation.
+   * @throws Exception
+   */
+  @Test
+  public void testTruncateTable() throws Exception {
+    String tableName = "truncatetable";
+    String partition = warehouseDir + "/" + tableName + "/part1=1";
+
+    CommandProcessorResponse ret = driver.run("CREATE TABLE " + tableName + " (key STRING, value STRING) PARTITIONED BY (part1 INT)");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    setPermission(warehouseDir + "/" + tableName);
+
+    ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    assertExistence(warehouseDir + "/" + tableName);
+
+    verifyPermission(warehouseDir + "/" + tableName);
+    verifyPermission(partition);
+
+    ret = driver.run("TRUNCATE TABLE " + tableName);
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    ret = driver.run("insert into table " + tableName + " partition(part1='1') select key,value from mysrc where part1='1' and part2='1'");
+    Assert.assertEquals(0, ret.getResponseCode());
+
+    verifyPermission(warehouseDir + "/" + tableName);
+
+    assertExistence(partition);
+    verifyPermission(partition);
+  }
+
   private void verifySinglePartition(String tableLoc, int index) throws Exception {
     verifyPermission(tableLoc + "/part1=1", index);
     verifyPermission(tableLoc + "/part1=2", index);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 837842169d1929e6016f7e1a260d6f9e4feac0b5..63b1e1af75fea38906eb845bb369ceb2c7b4d38d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -169,6 +169,9 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -4228,10 +4231,18 @@ private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws H
     try {
       // this is not transactional
+      HadoopShims shim = ShimLoader.getHadoopShims();
       for (Path location : getLocations(db, table, partSpec)) {
         FileSystem fs = location.getFileSystem(conf);
+
+        HdfsFileStatus fullFileStatus = shim.getFullFileStatus(conf, fs, location);
         fs.delete(location, true);
         fs.mkdirs(location);
+        try {
+          shim.setFullFileStatus(conf, fullFileStatus, fs, location);
+        } catch (Exception e) {
+          LOG.warn("Error setting permissions of " + location, e);
+        }
       }
     } catch (Exception e) {
       throw new HiveException(e, ErrorMsg.GENERIC_ERROR);