Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1468713)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -651,6 +651,8 @@
SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook", ""),
+ HIVE_TRUNCATE_SKIPTRASH("hive.truncate.skiptrash", false),
+
HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false),
HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
"org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"),
Index: conf/hive-default.xml.template
===================================================================
--- conf/hive-default.xml.template (revision 1468713)
+++ conf/hive-default.xml.template (working copy)
@@ -1886,5 +1886,14 @@
+<property>
+  <name>hive.truncate.skiptrash</name>
+  <value>false</value>
+  <description>
+    If true, TRUNCATE deletes the data immediately, skipping the trash; if false, the truncated data is moved to the trash.
+  </description>
+</property>
+
+
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1468713)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -164,7 +164,6 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ToolRunner;
-import org.stringtemplate.v4.ST;
/**
* DDLTask implementation.
@@ -3957,10 +3956,13 @@
Table table = db.getTable(tableName, true);
try {
- // this is not transactional
for (Path location : getLocations(db, table, partSpec)) {
FileSystem fs = location.getFileSystem(conf);
- fs.delete(location, true);
+ if (HiveConf.getBoolVar(conf, ConfVars.HIVE_TRUNCATE_SKIPTRASH)) {
+ fs.delete(location, true);
+ } else {
+ deleteDir(location);
+ }
fs.mkdirs(location);
}
} catch (Exception e) {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1468713)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -20,6 +20,7 @@
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_FORCE;
import java.io.Serializable;
import java.net.URI;
@@ -75,6 +76,7 @@
import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
+import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
@@ -126,7 +128,6 @@
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
/**
* DDLSemanticAnalyzer.
@@ -749,17 +750,24 @@
private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
ASTNode root = (ASTNode) ast.getChild(0); // TOK_TABLE_PARTITION
String tableName = getUnescapedName((ASTNode) root.getChild(0));
+ boolean isForce = false;
+ if (null != ast.getFirstChildWithType(TOK_FORCE)) {
+ isForce = true;
+ }
Table table = getTable(tableName, true);
- if (table.getTableType() != TableType.MANAGED_TABLE) {
- throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
+ if (!isForce) {
+ if (table.getTableType() != TableType.MANAGED_TABLE) {
+ throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
+ }
+ if (table.isNonNative()) {
+ throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
+ }
+ if (!table.isPartitioned() && root.getChildCount() > 1) {
+ throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
+ }
}
- if (table.isNonNative()) {
- throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
- }
- if (!table.isPartitioned() && root.getChildCount() > 1) {
- throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
- }
+
Map partSpec = getPartSpec((ASTNode) root.getChild(1));
if (partSpec == null) {
if (!table.isPartitioned()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (revision 1468713)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (working copy)
@@ -237,6 +237,7 @@
KW_UPDATE: 'UPDATE';
KW_RESTRICT: 'RESTRICT';
KW_CASCADE: 'CASCADE';
+KW_FORCE: 'FORCE';
KW_SKEWED: 'SKEWED';
KW_ROLLUP: 'ROLLUP';
KW_CUBE: 'CUBE';
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (revision 1468713)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (working copy)
@@ -278,6 +278,7 @@
TOK_TABSRC;
TOK_RESTRICT;
TOK_CASCADE;
+TOK_FORCE;
TOK_TABLESKEWED;
TOK_TABCOLVALUE;
TOK_TABCOLVALUE_PAIR;
@@ -643,6 +644,13 @@
: KW_IF KW_NOT KW_EXISTS
-> ^(TOK_IFNOTEXISTS)
;
+
+force
+@init { msgs.push("force clause"); }
+@after { msgs.pop(); }
+ : KW_FORCE
+ -> ^(TOK_FORCE)
+ ;
storedAsDirs
@init { msgs.push("stored as directories"); }
@@ -756,7 +764,7 @@
truncateTableStatement
@init { msgs.push("truncate table statement"); }
@after { msgs.pop(); }
- : KW_TRUNCATE KW_TABLE tablePartitionPrefix -> ^(TOK_TRUNCATETABLE tablePartitionPrefix);
+ : KW_TRUNCATE KW_TABLE tablePartitionPrefix force? -> ^(TOK_TRUNCATETABLE tablePartitionPrefix force?);
createIndexStatement
@init { msgs.push("create index statement");}