diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 05c1244..8f36924 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -1607,6 +1607,40 @@ public void testTruncateWithCM() throws IOException {
   }
 
   @Test
+  public void testTruncateTempTable() throws IOException {
+    String testName = "truncateTempTable";
+    LOG.info("Testing " + testName);
+    String dbName = testName + "_" + tid;
+
+    run("CREATE DATABASE " + dbName);
+    run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE");
+    run("CREATE TABLE " + dbName + ".ptned(a string) PARTITIONED BY(b int) STORED AS TEXTFILE");
+
+    String[] unptn_data = new String[] { "eleven", "twelve" };
+    String[] ptn_data = new String[] { "hundred", "thousand" };
+    String[] empty = new String[] {};
+    run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[0] + "')");
+    run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[1] + "')");
+    verifySetup("SELECT a from " + dbName + ".unptned ORDER BY a", unptn_data);
+
+    run("INSERT INTO TABLE " + dbName + ".ptned PARTITION (b=1) values('" + ptn_data[0] + "')");
+    run("INSERT INTO TABLE " + dbName + ".ptned PARTITION (b=2) values('" + ptn_data[1] + "')");
+    verifySetup("SELECT a from " + dbName + ".ptned ORDER BY a", ptn_data);
+
+    run("CREATE TEMPORARY TABLE " + dbName + ".tmp_unptned AS SELECT * from " + dbName + ".unptned");
+    verifySetup("SELECT a from " + dbName + ".tmp_unptned ORDER BY a", unptn_data);
+
+    assert(run("TRUNCATE TABLE " + dbName + ".tmp_unptned", true));
+    verifyRun("SELECT a from " + dbName + ".tmp_unptned", empty);
+
+    run("CREATE TEMPORARY TABLE " + dbName + ".tmp_ptned AS SELECT * from " + dbName + ".ptned");
+    verifySetup("SELECT a from " + dbName + ".tmp_ptned ORDER BY a", ptn_data);
+
+    assert(run("TRUNCATE TABLE " + dbName + ".tmp_ptned", true));
+    verifyRun("SELECT a from " + dbName + ".tmp_ptned", empty);
+  }
+
+  @Test
   public void testStatus() throws IOException {
     // first test ReplStateMap functionality
     Map<String, Long> cmap = new ReplStateMap<String, Long>();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index a319b88..4642ec2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -30,8 +30,13 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.io.HdfsUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
@@ -56,6 +61,8 @@
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.thrift.TException;
 
@@ -115,6 +122,18 @@ protected void drop_table_with_environment_context(String dbname, String name,
   }
 
   @Override
+  public void truncateTable(String dbName, String tableName, List<String> partNames) throws MetaException, TException {
+    // First try temp table
+    org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbName, tableName);
+    if (table != null) {
+      truncateTempTable(table);
+      return;
+    }
+    // Try underlying client
+    super.truncateTable(dbName, tableName, partNames);
+  }
+
+  @Override
   public org.apache.hadoop.hive.metastore.api.Table getTable(String dbname, String name) throws MetaException,
   TException, NoSuchObjectException {
     // First check temp tables
@@ -509,6 +528,63 @@ private static boolean fieldSchemaEqualsIgnoreComment(FieldSchema left, FieldSch
     return false;
   }
 
+  private boolean needToUpdateStats(Map<String, String> props, EnvironmentContext environmentContext) {
+    if (null == props) {
+      return false;
+    }
+    boolean statsPresent = false;
+    for (String stat : StatsSetupConst.supportedStats) {
+      String statVal = props.get(stat);
+      if (statVal != null && Long.parseLong(statVal) > 0) {
+        statsPresent = true;
+        //In the case of truncate table, we set the stats to be 0.
+        props.put(stat, "0");
+      }
+    }
+    //first set basic stats to true
+    StatsSetupConst.setBasicStatsState(props, StatsSetupConst.TRUE);
+    environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
+    //then invalidate column stats
+    StatsSetupConst.clearColumnStatsState(props);
+    return statsPresent;
+  }
+
+  private void truncateTempTable(org.apache.hadoop.hive.metastore.api.Table table) throws MetaException, TException {
+
+    boolean isAutopurge = "true".equalsIgnoreCase(table.getParameters().get("auto.purge"));
+    try {
+      // this is not transactional
+      Path location = new Path(table.getSd().getLocation());
+
+      FileSystem fs = location.getFileSystem(conf);
+      HadoopShims.HdfsEncryptionShim shim
+          = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
+      if (!shim.isPathEncrypted(location)) {
+        HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
+        FileStatus targetStatus = fs.getFileStatus(location);
+        String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
+        FileUtils.moveToTrash(fs, location, conf, isAutopurge);
+        fs.mkdirs(location);
+        HdfsUtils.setFullFileStatus(conf, status, targetGroup, fs, location, false);
+      } else {
+        FileStatus[] statuses = fs.listStatus(location, FileUtils.HIDDEN_FILES_PATH_FILTER);
+        if ((statuses != null) && (statuses.length > 0)) {
+          boolean success = Hive.trashFiles(fs, statuses, conf, isAutopurge);
+          if (!success) {
+            throw new HiveException("Error in deleting the contents of " + location.toString());
+          }
+        }
+      }
+
+      EnvironmentContext environmentContext = new EnvironmentContext();
+      if (needToUpdateStats(table.getParameters(), environmentContext)) {
+        alter_table_with_environmentContext(table.getDbName(), table.getTableName(), table, environmentContext);
+      }
+    } catch (Exception e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
   private void dropTempTable(org.apache.hadoop.hive.metastore.api.Table table, boolean deleteData,
       EnvironmentContext envContext) throws MetaException, TException, NoSuchObjectException,
       UnsupportedOperationException {