diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 84ee78f..880e22d 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -730,6 +730,8 @@
HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS("hive.security.authorization.createtable.owner.grants",
""),
+ HIVE_TRUNCATE_SKIPTRASH("hive.truncate.skiptrash", false),
+
// Print column names in output
HIVE_CLI_PRINT_HEADER("hive.cli.print.header", false),
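
Note on the new ConfVars entry: the flag defaults to false, so the trash-preserving path added later in this patch remains the default behavior. A minimal sketch of how a caller reads it (class and variable names are illustrative only), matching the getBoolVar call used in DDLTask below:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    // Illustrative only: how callers consult the new flag.
    public class TruncateSkipTrashRead {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // false unless hive.truncate.skiptrash is set to true, so truncated
        // data goes to the trash by default.
        boolean skipTrash = HiveConf.getBoolVar(conf, ConfVars.HIVE_TRUNCATE_SKIPTRASH);
        System.out.println("skip trash on truncate: " + skipTrash);
      }
    }
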
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index 66d22f9..1d19515 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -2192,4 +2192,13 @@
+
+<property>
+  <name>hive.truncate.skiptrash</name>
+  <value>false</value>
+  <description>
+    If true, data removed by TRUNCATE TABLE is deleted immediately and skips the trash; if false, it is moved to the trash.
+  </description>
+</property>
+
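
The template entry only documents the default. The value can also be overridden in hive-site.xml, per session via the SET command (set hive.truncate.skiptrash=true;), or programmatically; a brief sketch of the programmatic form (the class name is made up for illustration):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    // Illustrative only: flip the flag on a HiveConf instance before running DDL.
    public class SkipTrashOverride {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(ConfVars.HIVE_TRUNCATE_SKIPTRASH, true);
        System.out.println(conf.getBoolVar(ConfVars.HIVE_TRUNCATE_SKIPTRASH)); // prints true
      }
    }
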
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 92ed55b..7d92929 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -4047,7 +4047,11 @@ private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws H
// this is not transactional
for (Path location : getLocations(db, table, partSpec)) {
FileSystem fs = location.getFileSystem(conf);
- fs.delete(location, true);
+ if (HiveConf.getBoolVar(conf, ConfVars.HIVE_TRUNCATE_SKIPTRASH)) {
+ fs.delete(location, true);
+ } else {
+ deleteDir(location);
+ }
fs.mkdirs(location);
}
} catch (Exception e) {
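
The else branch above hands the work to a deleteDir helper that is expected to move the directory to the Hadoop trash instead of removing it outright; that helper is not part of this hunk. A minimal sketch of what such a helper might look like using Hadoop's Trash API (the class name and the fallback behavior are assumptions, not the actual Hive implementation):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.Trash;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    // Sketch only: move a directory to the trash rather than deleting it.
    public class TrashDeleteSketch {
      static void deleteDir(Path dir, Configuration conf) throws HiveException {
        try {
          FileSystem fs = dir.getFileSystem(conf);
          // moveToAppropriateTrash resolves the trash location for the path's
          // file system and moves the data there; it returns false when the
          // move does not happen (for example, fs.trash.interval is 0).
          if (!Trash.moveToAppropriateTrash(fs, dir, conf)) {
            // Fall back to a plain delete when the trash is unavailable.
            fs.delete(dir, true);
          }
        } catch (Exception e) {
          throw new HiveException(e);
        }
      }
    }
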
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0e2d555..93ec283 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -20,6 +20,7 @@
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_FORCE;
import java.io.Serializable;
import java.net.URI;
@@ -711,15 +712,19 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
String tableName = getUnescapedName((ASTNode) root.getChild(0));
Table table = getTable(tableName, true);
- if (table.getTableType() != TableType.MANAGED_TABLE) {
- throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
- }
- if (table.isNonNative()) {
- throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
- }
- if (!table.isPartitioned() && root.getChildCount() > 1) {
- throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
+
+ if (ast.getFirstChildWithType(TOK_FORCE) != null) {
+ if (table.getTableType() != TableType.MANAGED_TABLE) {
+ throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
+ }
+ if (table.isNonNative()) {
+ throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
+ }
+ if (!table.isPartitioned() && root.getChildCount() > 1) {
+ throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
+ }
}
+
Map<String, String> partSpec = getPartSpec((ASTNode) root.getChild(1));
if (partSpec == null) {
if (!table.isPartitioned()) {
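
As written, the managed-table, non-native-table, and partition-spec validations now run only when the statement carries the new optional FORCE clause; the clause is detected by looking for a TOK_FORCE child of the TOK_TRUNCATETABLE node. The same check, extracted as a standalone helper purely for illustration (hypothetical, not part of the patch):

    // "ast" is the TOK_TRUNCATETABLE node handed to analyzeTruncateTable.
    private static boolean hasForceClause(ASTNode ast) {
      return ast.getFirstChildWithType(HiveParser.TOK_FORCE) != null;
    }
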
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index f83c15d..19a36c3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -260,6 +260,7 @@ KW_SHOW_DATABASE: 'SHOW_DATABASE';
KW_UPDATE: 'UPDATE';
KW_RESTRICT: 'RESTRICT';
KW_CASCADE: 'CASCADE';
+KW_FORCE: 'FORCE';
KW_SKEWED: 'SKEWED';
KW_ROLLUP: 'ROLLUP';
KW_CUBE: 'CUBE';
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 1ce6bf3..286c051 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -290,6 +290,7 @@ TOK_TABNAME;
TOK_TABSRC;
TOK_RESTRICT;
TOK_CASCADE;
+TOK_FORCE;
TOK_TABLESKEWED;
TOK_TABCOLVALUE;
TOK_TABCOLVALUE_PAIR;
@@ -675,6 +676,13 @@ ifNotExists
: KW_IF KW_NOT KW_EXISTS
-> ^(TOK_IFNOTEXISTS)
;
+
+force
+@init { msgs.push("force clause"); }
+@after { msgs.pop(); }
+ : KW_FORCE
+ -> ^(TOK_FORCE)
+ ;
storedAsDirs
@init { msgs.push("stored as directories"); }
@@ -788,7 +796,7 @@ createTableStatement
truncateTableStatement
@init { msgs.push("truncate table statement"); }
@after { msgs.pop(); }
- : KW_TRUNCATE KW_TABLE tablePartitionPrefix (KW_COLUMNS LPAREN columnNameList RPAREN)? -> ^(TOK_TRUNCATETABLE tablePartitionPrefix columnNameList?);
+ : KW_TRUNCATE KW_TABLE tablePartitionPrefix (KW_COLUMNS LPAREN columnNameList RPAREN)? force? -> ^(TOK_TRUNCATETABLE tablePartitionPrefix columnNameList? force?);
createIndexStatement
@init { msgs.push("create index statement");}
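
Taken together, the lexer and parser changes make the trailing clause optional, so both TRUNCATE TABLE page_views and TRUNCATE TABLE page_views PARTITION (ds='2013-01-01') FORCE should parse, with FORCE surfacing as a TOK_FORCE node under TOK_TRUNCATETABLE. A small sketch to eyeball the resulting trees (table and partition names are made up; the printed tree shape may differ slightly between Hive versions):

    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;

    // Sketch only: print the ASTs for the two statement forms.
    public class TruncateForceParseSketch {
      public static void main(String[] args) throws Exception {
        ParseDriver pd = new ParseDriver();
        ASTNode plain = pd.parse("TRUNCATE TABLE page_views");
        ASTNode forced = pd.parse(
            "TRUNCATE TABLE page_views PARTITION (ds = '2013-01-01') FORCE");
        System.out.println(plain.toStringTree());
        // The second tree is expected to include a TOK_FORCE node.
        System.out.println(forced.toStringTree());
      }
    }
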