Index: ql/src/test/results/clientnegative/touch2.q.out
===================================================================
--- ql/src/test/results/clientnegative/touch2.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/touch2.q.out	(revision 0)
@@ -0,0 +1,4 @@
+PREHOOK: query: ALTER TABLE src TOUCH PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_TOUCH
+FAILED: Error in metadata: table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12}
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientnegative/touch1.q.out
===================================================================
--- ql/src/test/results/clientnegative/touch1.q.out	(revision 0)
+++ ql/src/test/results/clientnegative/touch1.q.out	(revision 0)
@@ -0,0 +1,4 @@
+PREHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='13')
+PREHOOK: type: ALTERTABLE_TOUCH
+FAILED: Error in metadata: Specified partition does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Index: ql/src/test/results/clientpositive/touch.q.out
===================================================================
--- ql/src/test/results/clientpositive/touch.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/touch.q.out	(revision 0)
@@ -0,0 +1,18 @@
+PREHOOK: query: ALTER TABLE src TOUCH
+PREHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: query: ALTER TABLE src TOUCH
+POSTHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src
+PREHOOK: query: ALTER TABLE srcpart TOUCH
+PREHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: query: ALTER TABLE srcpart TOUCH
+POSTHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Output: default@srcpart
+PREHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_TOUCH
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpart@ds=2008-04-08/hr=12
Index: ql/src/test/queries/clientnegative/touch1.q
===================================================================
--- ql/src/test/queries/clientnegative/touch1.q	(revision 0)
+++ ql/src/test/queries/clientnegative/touch1.q	(revision 0)
@@ -0,0 +1 @@
+ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='13');
Index: ql/src/test/queries/clientnegative/touch2.q
===================================================================
--- ql/src/test/queries/clientnegative/touch2.q	(revision 0)
+++ ql/src/test/queries/clientnegative/touch2.q	(revision 0)
@@ -0,0 +1 @@
+ALTER TABLE src TOUCH PARTITION (ds='2008-04-08', hr='12');
Index: ql/src/test/queries/clientpositive/touch.q
===================================================================
--- ql/src/test/queries/clientpositive/touch.q	(revision 0)
+++ ql/src/test/queries/clientpositive/touch.q	(revision 0)
@@ -0,0 +1,3 @@
+ALTER TABLE src TOUCH;
+ALTER TABLE srcpart TOUCH;
+ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='12');
\ No newline at end of file
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -78,6 +78,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
+import org.apache.hadoop.hive.ql.plan.TouchDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -149,6 +150,12 @@
         return addPartition(db, addPartitionDesc);
       }
 
+      TouchDesc touchDesc = work.getTouchDesc();
+      if (touchDesc != null) {
+        return touch(db, touchDesc);
+      }
+
+
       MsckDesc msckDesc = work.getMsckDesc();
       if (msckDesc != null) {
         return msck(db, msckDesc);
@@ -241,6 +248,49 @@
     return 0;
   }
 
+  /**
+   * Rewrite the partition's metadata and force the pre/post execute hooks to
+   * be fired.
+   *
+   * @param db
+   * @param touchDesc
+   * @return
+   * @throws HiveException
+   */
+  private int touch(Hive db, TouchDesc touchDesc)
+      throws HiveException {
+
+    String dbName = touchDesc.getDbName();
+    String tblName = touchDesc.getTableName();
+
+    Table tbl = db.getTable(dbName, tblName);
+
+    validateAlterTableType(tbl, AlterTableDesc.AlterTableTypes.TOUCH);
+
+    if (touchDesc.getPartSpec() == null) {
+      try {
+        db.alterTable(tblName, tbl);
+      } catch (InvalidOperationException e) {
+        throw new HiveException("Unable to update table");
+      }
+      work.getInputs().add(new ReadEntity(tbl));
+      work.getOutputs().add(new WriteEntity(tbl));
+    } else {
+      Partition part = db.getPartition(tbl, touchDesc.getPartSpec(), false);
+      if (part == null) {
+        throw new HiveException("Specified partition does not exist");
+      }
+      try {
+        db.alterPartition(tblName, part);
+      } catch (InvalidOperationException e) {
+        throw new HiveException(e);
+      }
+      work.getInputs().add(new ReadEntity(part));
+      work.getOutputs().add(new WriteEntity(part));
+    }
+    return 0;
+  }
+
   private void validateAlterTableType(
       Table tbl, AlterTableDesc.AlterTableTypes alterType) throws HiveException {
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(working copy)
@@ -20,7 +20,6 @@
 
 import java.io.Serializable;
 import java.util.HashSet;
-import java.util.Set;
 
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -43,6 +42,7 @@
   private ShowPartitionsDesc showPartsDesc;
   private DescTableDesc descTblDesc;
   private AddPartitionDesc addPartitionDesc;
+  private TouchDesc touchDesc;
   private MsckDesc msckDesc;
   private ShowTableStatusDesc showTblStatusDesc;
 
@@ -178,7 +178,18 @@
     this.addPartitionDesc = addPartitionDesc;
   }
 
+  /**
+   * @param touchDesc
+   *          information about the table/partitions that we want to touch
+   */
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      TouchDesc touchDesc) {
+    this(inputs, outputs);
+
+    this.touchDesc = touchDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       MsckDesc checkDesc) {
     this(inputs, outputs);
 
@@ -372,6 +383,21 @@
   }
 
   /**
+   * @return information about the table/partitions we want to touch.
+   */
+  public TouchDesc getTouchDesc() {
+    return touchDesc;
+  }
+
+  /**
+   * @param touchDesc
+   *          information about the table/partitions we want to touch.
+   */
+  public void setTouchDesc(TouchDesc touchDesc) {
+    this.touchDesc = touchDesc;
+  }
+
+  /**
    * @return Metastore check description
    */
   public MsckDesc getMsckDesc() {
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/TouchDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/TouchDesc.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/TouchDesc.java	(revision 0)
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Contains information needed to touch a partition (cause pre/post hooks to
+ * fire).
+ */
+public class TouchDesc extends DDLDesc {
+  private String tableName;
+  private String dbName;
+  private LinkedHashMap<String, String> partSpec;
+
+
+  public TouchDesc() {
+  }
+
+  /**
+   * @param dbName
+   *          database that contains the table / partition
+   * @param tableName
+   *          table containing the partition
+   * @param partSpec
+   *          partition specification. Null if touching a table.
+   */
+  public TouchDesc(String dbName, String tableName,
+      Map<String, String> partSpec) {
+    super();
+    this.dbName = dbName;
+    this.tableName = tableName;
+    if (partSpec == null) {
+      this.partSpec = null;
+    } else {
+      this.partSpec = new LinkedHashMap<String, String>(partSpec);
+    }
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public String getDbName() {
+    return dbName;
+  }
+
+  public void setDbName(String dbName) {
+    this.dbName = dbName;
+  }
+
+  public LinkedHashMap<String, String> getPartSpec() {
+    return partSpec;
+  }
+
+  public void setPartSpec(LinkedHashMap<String, String> partSpec) {
+    this.partSpec = partSpec;
+  }
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java	(working copy)
@@ -41,7 +41,8 @@
    */
   public static enum AlterTableTypes {
     RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, ADDSERDE, ADDSERDEPROPS,
-    ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION
+    ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION,
+    TOUCH
   };
 
   AlterTableTypes op;
@@ -134,7 +135,7 @@
   }
 
   /**
-   * 
+   *
    * @param name
    *          name of the table
    * @param inputFormat
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -94,6 +94,7 @@
 TOK_ALTERTABLE_REPLACECOLS;
 TOK_ALTERTABLE_ADDPARTS;
 TOK_ALTERTABLE_DROPPARTS;
+TOK_ALTERTABLE_TOUCH;
 TOK_ALTERTABLE_SERDEPROPERTIES;
 TOK_ALTERTABLE_SERIALIZER;
 TOK_ALTERTABLE_FILEFORMAT;
@@ -279,6 +280,7 @@
     | alterStatementSuffixRenameCol
     | alterStatementSuffixDropPartitions
     | alterStatementSuffixAddPartitions
+    | alterStatementSuffixTouch
     | alterStatementSuffixProperties
     | alterStatementSuffixSerdeProperties
     | alterStatementSuffixFileFormat
@@ -326,6 +328,13 @@
     -> ^(TOK_ALTERTABLE_ADDPARTS Identifier ifNotExists? (partitionSpec partitionLocation?)+)
     ;
 
+alterStatementSuffixTouch
+@init { msgs.push("touch statement"); }
+@after { msgs.pop(); }
+    : Identifier KW_TOUCH (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_TOUCH Identifier (partitionSpec)*)
+    ;
+
 partitionLocation
 @init { msgs.push("partition location"); }
 @after { msgs.pop(); }
@@ -1606,6 +1615,7 @@
 KW_RECORDWRITER: 'RECORDWRITER';
 KW_SEMI: 'SEMI';
 KW_LATERAL: 'LATERAL';
+KW_TOUCH: 'TOUCH';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
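Note: for reference, the grammar rule added above takes a table name followed by an optional list of partition specs, so the statements it accepts look like the following usage sketch (taken from the touch.q test added in this patch; srcpart and the ds/hr values are the standard Hive test fixtures):

    ALTER TABLE src TOUCH;                                            -- non-partitioned table
    ALTER TABLE srcpart TOUCH;                                        -- partitioned table, no partition spec
    ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='12');   -- a single partition

TOUCH does not modify any data; it only re-saves the table or partition metadata so that the pre/post execute hooks fire, which is useful when files under a table or partition have been changed by an external process.
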
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(working copy)
@@ -45,6 +45,7 @@
     commandType.put(HiveParser.TOK_ALTERTABLE_RENAME, "ALTERTABLE_RENAME");
     commandType.put(HiveParser.TOK_ALTERTABLE_DROPPARTS, "ALTERTABLE_DROPPARTS");
     commandType.put(HiveParser.TOK_ALTERTABLE_ADDPARTS, "ALTERTABLE_ADDPARTS");
+    commandType.put(HiveParser.TOK_ALTERTABLE_TOUCH, "ALTERTABLE_TOUCH");
     commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, "ALTERTABLE_PROPERTIES");
     commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, "ALTERTABLE_SERIALIZER");
     commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, "ALTERTABLE_SERDEPROPERTIES");
@@ -96,6 +97,7 @@
     case HiveParser.TOK_SHOWPARTITIONS:
     case HiveParser.TOK_ALTERTABLE_FILEFORMAT:
     case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
+    case HiveParser.TOK_ALTERTABLE_TOUCH:
       return new DDLSemanticAnalyzer(conf);
     case HiveParser.TOK_CREATEFUNCTION:
     case HiveParser.TOK_DROPFUNCTION:
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 5803)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.TouchDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -118,6 +119,8 @@
       analyzeAlterTableProps(ast, true);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
       analyzeAlterTableRename(ast);
+    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_TOUCH) {
+      analyzeAlterTableTouch(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
       analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
@@ -577,7 +580,40 @@
     }
   }
 
+  /**
+   * Rewrite the metadata for one or more partitions in a table. Useful when
+   * an external process modifies files on HDFS and you want the pre/post
+   * hooks to be fired for the specified partition.
+   *
+   * @param ast
+   *          The parsed command tree.
+   * @throws SemanticException
+   *           Parsing failed
+   */
+  private void analyzeAlterTableTouch(CommonTree ast)
+      throws SemanticException {
+
+    String tblName = unescapeIdentifier(ast.getChild(0).getText());
+    // partition name to value
+    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
+
+    if (partSpecs.size() == 0) {
+      TouchDesc touchDesc = new TouchDesc(
+          MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, null);
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+          touchDesc), conf));
+    } else {
+      for (Map<String, String> partSpec : partSpecs) {
+        TouchDesc touchDesc = new TouchDesc(
+            MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, partSpec);
+        rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+            touchDesc), conf));
+      }
+    }
+  }
+
+
   /**
    * Verify that the information in the metastore matches up with the data on
    * the fs.
   *