Index: ql/src/test/results/clientpositive/drop_partitions_filter.q.out
===================================================================
--- ql/src/test/results/clientpositive/drop_partitions_filter.q.out (revision 0)
+++ ql/src/test/results/clientpositive/drop_partitions_filter.q.out (working copy)
@@ -0,0 +1,199 @@
+PREHOOK: query: create table ptestfilter (a string, b int) partitioned by (c string, d string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table ptestfilter (a string, b int) partitioned by (c string, d string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@ptestfilter
+PREHOOK: query: describe ptestfilter
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe ptestfilter
+POSTHOOK: type: DESCTABLE
+a string
+b int
+c string
+d string
+PREHOOK: query: alter table ptestfilter add partition (c='US', d=1)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='US', d=1)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=US/d=1
+PREHOOK: query: alter table ptestfilter add partition (c='US', d=2)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='US', d=2)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=US/d=2
+PREHOOK: query: alter table ptestFilter add partition (c='Uganda', d=2)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestFilter add partition (c='Uganda', d=2)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Uganda/d=2
+PREHOOK: query: alter table ptestfilter add partition (c='Germany', d=2)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='Germany', d=2)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Germany/d=2
+PREHOOK: query: alter table ptestfilter add partition (c='Canada', d=3)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='Canada', d=3)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Canada/d=3
+PREHOOK: query: alter table ptestfilter add partition (c='Russia', d=3)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='Russia', d=3)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Russia/d=3
+PREHOOK: query: alter table ptestfilter add partition (c='Greece', d=2)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='Greece', d=2)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Greece/d=2
+PREHOOK: query: alter table ptestfilter add partition (c='India', d=3)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='India', d=3)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=India/d=3
+PREHOOK: query: alter table ptestfilter add partition (c='France', d=4)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter add partition (c='France', d=4)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=France/d=4
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=Canada/d=3
+c=France/d=4
+c=Germany/d=2
+c=Greece/d=2
+c=India/d=3
+c=Russia/d=3
+c=US/d=1
+c=US/d=2
+c=Uganda/d=2
+PREHOOK: query: alter table ptestfilter drop partition (c='US', d<'2')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=US/d=1
+POSTHOOK: query: alter table ptestfilter drop partition (c='US', d<'2')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=US/d=1
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=Canada/d=3
+c=France/d=4
+c=Germany/d=2
+c=Greece/d=2
+c=India/d=3
+c=Russia/d=3
+c=US/d=2
+c=Uganda/d=2
+PREHOOK: query: alter table ptestfilter drop partition (c>='US', d<='2')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=US/d=2
+PREHOOK: Output: default@ptestfilter@c=Uganda/d=2
+POSTHOOK: query: alter table ptestfilter drop partition (c>='US', d<='2')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=US/d=2
+POSTHOOK: Output: default@ptestfilter@c=Uganda/d=2
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=Canada/d=3
+c=France/d=4
+c=Germany/d=2
+c=Greece/d=2
+c=India/d=3
+c=Russia/d=3
+PREHOOK: query: alter table ptestfilter drop partition (c >'India')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=Russia/d=3
+POSTHOOK: query: alter table ptestfilter drop partition (c >'India')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Russia/d=3
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=Canada/d=3
+c=France/d=4
+c=Germany/d=2
+c=Greece/d=2
+c=India/d=3
+PREHOOK: query: alter table ptestfilter drop partition (c >='India'),
+ partition (c='Greece', d='2')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=Greece/d=2
+PREHOOK: Output: default@ptestfilter@c=India/d=3
+POSTHOOK: query: alter table ptestfilter drop partition (c >='India'),
+ partition (c='Greece', d='2')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Greece/d=2
+POSTHOOK: Output: default@ptestfilter@c=India/d=3
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=Canada/d=3
+c=France/d=4
+c=Germany/d=2
+PREHOOK: query: alter table ptestfilter drop partition (c != 'France')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=Canada/d=3
+PREHOOK: Output: default@ptestfilter@c=Germany/d=2
+POSTHOOK: query: alter table ptestfilter drop partition (c != 'France')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=Canada/d=3
+POSTHOOK: Output: default@ptestfilter@c=Germany/d=2
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=France/d=4
+PREHOOK: query: alter table ptestfilter drop if exists partition (c='US')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: alter table ptestfilter drop if exists partition (c='US')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+c=France/d=4
+PREHOOK: query: drop table ptestfilter
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: drop table ptestfilter
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter
Index: ql/src/test/results/clientpositive/drop_multi_partitions.q.out
===================================================================
--- ql/src/test/results/clientpositive/drop_multi_partitions.q.out (revision 1307617)
+++ ql/src/test/results/clientpositive/drop_multi_partitions.q.out (working copy)
@@ -36,7 +36,7 @@
 POSTHOOK: query: explain extended alter table mp drop partition (b='1')
 POSTHOOK: type: ALTERTABLE_DROPPARTS
 ABSTRACT SYNTAX TREE:
-  (TOK_ALTERTABLE_DROPPARTS mp (TOK_PARTSPEC (TOK_PARTVAL b '1')))
+  (TOK_ALTERTABLE_DROPPARTS mp (TOK_PARTSPEC (TOK_PARTVAL b = '1')))
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
@@ -51,6 +51,8 @@
 PREHOOK: query: alter table mp drop partition (b='1')
 PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@mp
+PREHOOK: Output: default@mp@b=1/c=1
+PREHOOK: Output: default@mp@b=1/c=2
 POSTHOOK: query: alter table mp drop partition (b='1')
 POSTHOOK: type: ALTERTABLE_DROPPARTS
 POSTHOOK: Input: default@mp
Index: ql/src/test/results/clientpositive/escape1.q.out
===================================================================
--- ql/src/test/results/clientpositive/escape1.q.out (revision 1307617)
+++ ql/src/test/results/clientpositive/escape1.q.out (working copy)
@@ -1574,6 +1574,131 @@
 PREHOOK: query: ALTER TABLE escape1 DROP PARTITION (ds='1')
 PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@escape1
+PREHOOK: Output: default@escape1@ds=1/part=
+PREHOOK: Output: default@escape1@ds=1/part=!
+PREHOOK: Output: default@escape1@ds=1/part=$
+PREHOOK: Output: default@escape1@ds=1/part=%00
+PREHOOK: Output: default@escape1@ds=1/part=%02
+PREHOOK: Output: default@escape1@ds=1/part=%03
+PREHOOK: Output: default@escape1@ds=1/part=%04
+PREHOOK: Output: default@escape1@ds=1/part=%05
+PREHOOK: Output: default@escape1@ds=1/part=%06
+PREHOOK: Output: default@escape1@ds=1/part=%07
+PREHOOK: Output: default@escape1@ds=1/part=%08
+PREHOOK: Output: default@escape1@ds=1/part=%09
+PREHOOK: Output: default@escape1@ds=1/part=%0B
+PREHOOK: Output: default@escape1@ds=1/part=%0C
+PREHOOK: Output: default@escape1@ds=1/part=%0E
+PREHOOK: Output: default@escape1@ds=1/part=%0F
+PREHOOK: Output: default@escape1@ds=1/part=%10
+PREHOOK: Output: default@escape1@ds=1/part=%11
+PREHOOK: Output: default@escape1@ds=1/part=%12
+PREHOOK: Output: default@escape1@ds=1/part=%13
+PREHOOK: Output: default@escape1@ds=1/part=%14
+PREHOOK: Output: default@escape1@ds=1/part=%15
+PREHOOK: Output: default@escape1@ds=1/part=%16
+PREHOOK: Output: default@escape1@ds=1/part=%17
+PREHOOK: Output: default@escape1@ds=1/part=%18
+PREHOOK: Output: default@escape1@ds=1/part=%19
+PREHOOK: Output: default@escape1@ds=1/part=%1A
+PREHOOK: Output: default@escape1@ds=1/part=%1B
+PREHOOK: Output: default@escape1@ds=1/part=%1C
+PREHOOK: Output: default@escape1@ds=1/part=%1D
+PREHOOK: Output: default@escape1@ds=1/part=%1E
+PREHOOK: Output: default@escape1@ds=1/part=%1F
+PREHOOK: Output: default@escape1@ds=1/part=%22
+PREHOOK: Output: default@escape1@ds=1/part=%23
+PREHOOK: Output: default@escape1@ds=1/part=%25
+PREHOOK: Output: default@escape1@ds=1/part=%27
+PREHOOK: Output: default@escape1@ds=1/part=%2A
+PREHOOK: Output: default@escape1@ds=1/part=%2F
+PREHOOK: Output: default@escape1@ds=1/part=%3A
+PREHOOK: Output: default@escape1@ds=1/part=%3D
+PREHOOK: Output: default@escape1@ds=1/part=%3F
+PREHOOK: Output: default@escape1@ds=1/part=%5B
+PREHOOK: Output: default@escape1@ds=1/part=%5C
+PREHOOK: Output: default@escape1@ds=1/part=%5D
+PREHOOK: Output: default@escape1@ds=1/part=%5E
+PREHOOK: Output: default@escape1@ds=1/part=%7B
+PREHOOK: Output: default@escape1@ds=1/part=&
+PREHOOK: Output: default@escape1@ds=1/part=(
+PREHOOK: Output: default@escape1@ds=1/part=)
+PREHOOK: Output: default@escape1@ds=1/part=+
+PREHOOK: Output: default@escape1@ds=1/part=,
+PREHOOK: Output: default@escape1@ds=1/part=-
+PREHOOK: Output: default@escape1@ds=1/part=.
+PREHOOK: Output: default@escape1@ds=1/part=0
+PREHOOK: Output: default@escape1@ds=1/part=1
+PREHOOK: Output: default@escape1@ds=1/part=2
+PREHOOK: Output: default@escape1@ds=1/part=3
+PREHOOK: Output: default@escape1@ds=1/part=4
+PREHOOK: Output: default@escape1@ds=1/part=5
+PREHOOK: Output: default@escape1@ds=1/part=6
+PREHOOK: Output: default@escape1@ds=1/part=7
+PREHOOK: Output: default@escape1@ds=1/part=8
+PREHOOK: Output: default@escape1@ds=1/part=9
+PREHOOK: Output: default@escape1@ds=1/part=;
+PREHOOK: Output: default@escape1@ds=1/part=<
+PREHOOK: Output: default@escape1@ds=1/part=>
+PREHOOK: Output: default@escape1@ds=1/part=@
+PREHOOK: Output: default@escape1@ds=1/part=A
+PREHOOK: Output: default@escape1@ds=1/part=B
+PREHOOK: Output: default@escape1@ds=1/part=C
+PREHOOK: Output: default@escape1@ds=1/part=D
+PREHOOK: Output: default@escape1@ds=1/part=E
+PREHOOK: Output: default@escape1@ds=1/part=F
+PREHOOK: Output: default@escape1@ds=1/part=G
+PREHOOK: Output: default@escape1@ds=1/part=H
+PREHOOK: Output: default@escape1@ds=1/part=I
+PREHOOK: Output: default@escape1@ds=1/part=J
+PREHOOK: Output: default@escape1@ds=1/part=K
+PREHOOK: Output: default@escape1@ds=1/part=L
+PREHOOK: Output: default@escape1@ds=1/part=M
+PREHOOK: Output: default@escape1@ds=1/part=N
+PREHOOK: Output: default@escape1@ds=1/part=O
+PREHOOK: Output: default@escape1@ds=1/part=P
+PREHOOK: Output: default@escape1@ds=1/part=Q
+PREHOOK: Output: default@escape1@ds=1/part=R
+PREHOOK: Output: default@escape1@ds=1/part=S
+PREHOOK: Output: default@escape1@ds=1/part=T
+PREHOOK: Output: default@escape1@ds=1/part=U
+PREHOOK: Output: default@escape1@ds=1/part=V
+PREHOOK: Output: default@escape1@ds=1/part=W
+PREHOOK: Output: default@escape1@ds=1/part=X
+PREHOOK: Output: default@escape1@ds=1/part=Y
+PREHOOK: Output: default@escape1@ds=1/part=Z
+PREHOOK: Output: default@escape1@ds=1/part=_
+PREHOOK: Output: default@escape1@ds=1/part=__HIVE_DEFAULT_PARTITION__
+PREHOOK: Output: default@escape1@ds=1/part=`
+PREHOOK: Output: default@escape1@ds=1/part=a
+PREHOOK: Output: default@escape1@ds=1/part=b
+PREHOOK: Output: default@escape1@ds=1/part=c
+PREHOOK: Output: default@escape1@ds=1/part=d
+PREHOOK: Output: default@escape1@ds=1/part=e
+PREHOOK: Output: default@escape1@ds=1/part=f
+PREHOOK: Output: default@escape1@ds=1/part=g
+PREHOOK: Output: default@escape1@ds=1/part=h
+PREHOOK: Output: default@escape1@ds=1/part=i
+PREHOOK: Output: default@escape1@ds=1/part=j
+PREHOOK: Output: default@escape1@ds=1/part=k
+PREHOOK: Output: default@escape1@ds=1/part=l
+PREHOOK: Output: default@escape1@ds=1/part=m
+PREHOOK: Output: default@escape1@ds=1/part=n
+PREHOOK: Output: default@escape1@ds=1/part=o
+PREHOOK: Output: default@escape1@ds=1/part=p
+PREHOOK: Output: default@escape1@ds=1/part=q
+PREHOOK: Output: default@escape1@ds=1/part=r
+PREHOOK: Output: default@escape1@ds=1/part=s
+PREHOOK: Output: default@escape1@ds=1/part=t
+PREHOOK: Output: default@escape1@ds=1/part=u
+PREHOOK: Output: default@escape1@ds=1/part=v
+PREHOOK: Output: default@escape1@ds=1/part=w
+PREHOOK: Output: default@escape1@ds=1/part=x
+PREHOOK: Output: default@escape1@ds=1/part=y
+PREHOOK: Output: default@escape1@ds=1/part=z
+PREHOOK: Output: default@escape1@ds=1/part=|
+PREHOOK: Output: default@escape1@ds=1/part=}
+PREHOOK: Output: default@escape1@ds=1/part=~
 POSTHOOK: query: ALTER TABLE escape1 DROP PARTITION (ds='1')
 POSTHOOK: type: ALTERTABLE_DROPPARTS
 POSTHOOK: Input: default@escape1
Index: ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out
===================================================================
--- ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out (revision 0)
+++ ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out (working copy)
@@ -0,0 +1,18 @@
+PREHOOK: query: create table ptestfilter1 (a string, b int) partitioned by (c string, d string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table ptestfilter1 (a string, b int) partitioned by (c string, d string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@ptestfilter1
+PREHOOK: query: alter table ptestfilter1 add partition (c='US', d=1)
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@ptestfilter1
+POSTHOOK: query: alter table ptestfilter1 add partition (c='US', d=1)
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@ptestfilter1
+POSTHOOK: Output: default@ptestfilter1@c=US/d=1
+PREHOOK: query: show partitions ptestfilter1
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions ptestfilter1
+POSTHOOK: type: SHOWPARTITIONS
+c=US/d=1
+FAILED: Error in semantic analysis: Partition not found c = 'US' AND d < 1
Index: ql/src/test/results/clientnegative/drop_partition_failure.q.out
===================================================================
--- ql/src/test/results/clientnegative/drop_partition_failure.q.out (revision 1307617)
+++ ql/src/test/results/clientnegative/drop_partition_failure.q.out (working copy)
@@ -31,4 +31,4 @@
 b=1/c=1
 b=1/c=2
 b=2/c=2
-FAILED: Error in semantic analysis: Line 3:31 Partition not found ''3''
+FAILED: Error in semantic analysis: Partition not found b = '3'
Index: ql/src/test/queries/clientpositive/drop_partitions_filter.q
===================================================================
--- ql/src/test/queries/clientpositive/drop_partitions_filter.q (revision 0)
+++ ql/src/test/queries/clientpositive/drop_partitions_filter.q (working copy)
@@ -0,0 +1,37 @@
+create table ptestfilter (a string, b int) partitioned by (c string, d string);
+describe ptestfilter;
+
+alter table ptestfilter add partition (c='US', d=1);
+alter table ptestfilter add partition (c='US', d=2);
+alter table ptestFilter add partition (c='Uganda', d=2);
+alter table ptestfilter add partition (c='Germany', d=2);
+alter table ptestfilter add partition (c='Canada', d=3);
+alter table ptestfilter add partition (c='Russia', d=3);
+alter table ptestfilter add partition (c='Greece', d=2);
+alter table ptestfilter add partition (c='India', d=3);
+alter table ptestfilter add partition (c='France', d=4);
+show partitions ptestfilter;
+
+alter table ptestfilter drop partition (c='US', d<'2');
+show partitions ptestfilter;
+
+alter table ptestfilter drop partition (c>='US', d<='2');
+show partitions ptestfilter;
+
+alter table ptestfilter drop partition (c >'India');
+show partitions ptestfilter;
+
+alter table ptestfilter drop partition (c >='India'),
+ partition (c='Greece', d='2');
+show partitions ptestfilter;
+
+alter table ptestfilter drop partition (c != 'France');
+show partitions ptestfilter;
+
+set hive.exec.drop.ignorenonexistent=false;
+alter table ptestfilter drop if exists partition (c='US');
+show partitions ptestfilter;
+
+drop table ptestfilter;
+
+
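The golden outputs above depend on the partition-key predicates being applied as plain string comparisons, which is why (c>='US', d<='2') also drops c='Uganda': lexicographically, "Uganda" sorts after "US". A minimal, self-contained illustration of that ordering (the class name FilterOrderingNote is invented for this note and is not part of the patch):

    public class FilterOrderingNote {
      public static void main(String[] args) {
        // 'g' (0x67) sorts after 'S' (0x53), so "Uganda" compares greater than "US"
        // and the filter c >= 'US' matches both the US and Uganda partitions.
        System.out.println("Uganda".compareTo("US") >= 0); // true
        // d is compared as a string as well: "1" <= "2" and "2" <= "2".
        System.out.println("1".compareTo("2") <= 0);       // true
      }
    }
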
Index: ql/src/test/queries/clientnegative/drop_partition_filter_failure.q
===================================================================
--- ql/src/test/queries/clientnegative/drop_partition_filter_failure.q (revision 0)
+++ ql/src/test/queries/clientnegative/drop_partition_filter_failure.q (working copy)
@@ -0,0 +1,8 @@
+create table ptestfilter1 (a string, b int) partitioned by (c string, d string);
+
+alter table ptestfilter1 add partition (c='US', d=1);
+show partitions ptestfilter1;
+
+set hive.exec.drop.ignorenonexistent=false;
+alter table ptestfilter1 drop partition (c='US', d<1);
+
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (revision 1307617)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (working copy)
@@ -20,9 +20,7 @@
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * DropTableDesc.
  */
@@ -33,7 +31,7 @@
   private static final long serialVersionUID = 1L;
 
   String tableName;
-  ArrayList<LinkedHashMap<String, String>> partSpecs;
+  ArrayList<PartitionSpec> partSpecs;
   boolean expectView;
   boolean ifExists;
 
@@ -50,13 +48,12 @@
     this.ifExists = ifExists;
   }
 
-  public DropTableDesc(String tableName,
-      List<Map<String, String>> partSpecs, boolean expectView) {
-
+  public DropTableDesc(String tableName, List<PartitionSpec> partSpecs, boolean expectView) {
+
     this.tableName = tableName;
-    this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
+    this.partSpecs = new ArrayList<PartitionSpec>(partSpecs.size());
     for (int i = 0; i < partSpecs.size(); i++) {
-      this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+      this.partSpecs.add(partSpecs.get(i));
     }
     this.expectView = expectView;
   }
@@ -80,7 +77,7 @@
   /**
    * @return the partSpecs
    */
-  public ArrayList<LinkedHashMap<String, String>> getPartSpecs() {
+  public ArrayList<PartitionSpec> getPartSpecs() {
     return partSpecs;
   }
 
@@ -88,7 +85,7 @@
    * @param partSpecs
    *          the partSpecs to set
    */
-  public void setPartSpecs(ArrayList<LinkedHashMap<String, String>> partSpecs) {
+  public void setPartSpecs(ArrayList<PartitionSpec> partSpecs) {
     this.partSpecs = partSpecs;
   }
 
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionSpec.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionSpec.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionSpec.java (working copy)
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * PartitionSpec
+ *
+ */
+@Explain(displayName = "Partition specification")
+public class PartitionSpec {
+
+  private class PredicateSpec {
+    private String operator;
+    private String value;
+
+    public PredicateSpec() {
+    }
+
+    public PredicateSpec(String operator, String value) {
+      this.operator = operator;
+      this.value = value;
+    }
+
+    public String getOperator() {
+      return this.operator;
+    }
+
+    public String getValue() {
+      return this.value;
+    }
+
+    public void setOperator(String operator) {
+      this.operator = operator;
+    }
+
+    public void setValue(String value) {
+      this.value = value;
+    }
+
+    @Override
+    public String toString() {
+      return (((this.operator.equals("!="))? "<>": this.operator) + " " + this.value);
+    }
+  }
+
+  private Map<String, PredicateSpec> partSpec;
+
+  public PartitionSpec() {
+    this.partSpec = new LinkedHashMap<String, PredicateSpec>();
+  }
+
+  /**
+   * @param key
+   *          partition key name for one partition key compare in the spec
+   * @param operator
+   *          the operator that is used for the comparison
+   * @param value
+   *          the value to be compared against
+   */
+  public void addPredicate(String key, String operator, String value) {
+    partSpec.put(key, new PredicateSpec(operator, value));
+  }
+
+  /**
+   * @param key
+   *          partition key to look for in the partition spec
+   * @return true if key exists in the partition spec, false otherwise
+   */
+  public boolean existsKey(String key) {
+    return (partSpec.get(key) != null);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder filterString = new StringBuilder();
+    int count = 0;
+    for (Map.Entry<String, PredicateSpec> entry: this.partSpec.entrySet()) {
+      if (count > 0) {
+        filterString.append(" AND ");
+      }
+      filterString.append(entry.getKey() + " " + entry.getValue().toString());
+      count++;
+    }
+    return filterString.toString();
+  }
+}
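To make the shape of the generated filter concrete, the snippet below builds the same PartitionSpec that getFullPartitionSpecs() (added later in this patch) would produce for drop partition (c='US', d<'2'). The wrapper class name is invented for illustration, and the quoted values mirror the literal token text taken from the AST.

    import org.apache.hadoop.hive.ql.plan.PartitionSpec;

    public class PartitionSpecExample {
      public static void main(String[] args) {
        PartitionSpec spec = new PartitionSpec();
        spec.addPredicate("c", "=", "'US'");
        spec.addPredicate("d", "<", "'2'");

        // toString() joins the predicates with AND and rewrites "!=" to "<>",
        // yielding the filter string handed to the metastore lookup.
        System.out.println(spec);   // prints: c = 'US' AND d < '2'
      }
    }
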
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1307617)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -686,8 +686,8 @@
 alterStatementSuffixDropPartitions
 @init { msgs.push("drop partition statement"); }
 @after { msgs.pop(); }
-    : Identifier KW_DROP ifExists? partitionSpec (COMMA partitionSpec)*
-    -> ^(TOK_ALTERTABLE_DROPPARTS Identifier partitionSpec+ ifExists?)
+    : Identifier KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)*
+    -> ^(TOK_ALTERTABLE_DROPPARTS Identifier dropPartitionSpec+ ifExists?)
     ;
 
 alterStatementSuffixProperties
@@ -2069,6 +2069,22 @@
     Identifier (EQUAL constant)? -> ^(TOK_PARTVAL Identifier constant?)
     ;
 
+dropPartitionSpec
+    :
+    KW_PARTITION
+     LPAREN dropPartitionVal (COMMA dropPartitionVal )* RPAREN -> ^(TOK_PARTSPEC dropPartitionVal +)
+    ;
+
+dropPartitionVal
+    :
+    Identifier dropPartitionOperator constant -> ^(TOK_PARTVAL Identifier dropPartitionOperator constant)
+    ;
+
+dropPartitionOperator
+    :
+    EQUAL | NOTEQUAL | LESSTHANOREQUALTO | LESSTHAN | GREATERTHANOREQUALTO | GREATERTHAN
+    ;
+
 sysFuncNames
     :
     KW_AND
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1307617)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -32,9 +32,9 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
-import java.util.Map.Entry;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
@@ -58,8 +58,8 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
+import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
-import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -70,7 +70,9 @@
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -88,6 +90,7 @@
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.PartitionSpec;
 import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
@@ -106,8 +109,6 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -1767,7 +1768,7 @@
     String tblName = getUnescapedName((ASTNode)ast.getChild(0));
 
     // get table metadata
-    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
+    List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
     DropTableDesc dropTblDesc = new DropTableDesc(tblName, partSpecs,
         expectView);
@@ -1786,7 +1787,7 @@
       // configured not to fail silently
       boolean throwException = !ifExists
         && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
-      addTablePartsOutputs(tblName, partSpecs, throwException, false, ast);
+      addTableDropPartsOutputs(tblName, partSpecs, throwException);
     }
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -2071,6 +2072,39 @@
   }
 
   /**
+   * Get the partition specs from the tree. This stores the full specification
+   * with the comparator operator into the output list.
+   *
+   * @param ast
+   *          Tree to extract partitions from.
+   * @return A list of PartitionSpec objects which contain the mapping from
+   *         key to operator and value.
+   * @throws SemanticException
+   */
+  private List<PartitionSpec> getFullPartitionSpecs(CommonTree ast)
+      throws SemanticException {
+    List<PartitionSpec> partSpecList = new ArrayList<PartitionSpec>();
+
+    for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
+      Tree partSpecTree = ast.getChild(childIndex);
+      if (partSpecTree.getType() == HiveParser.TOK_PARTSPEC) {
+        PartitionSpec partSpec = new PartitionSpec();
+
+        for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
+          CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
+          assert(partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
+          String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
+          String operator = partSpecSingleKey.getChild(1).getText();
+          String val = partSpecSingleKey.getChild(2).getText();
+          partSpec.addPredicate(key, operator, val);
+        }
+
+        partSpecList.add(partSpec);
+      }
+    }
+    return partSpecList;
+  }
+
+  /**
    * Certain partition values are are used by hive. e.g. the default partition
    * in dynamic partitioning and the intermediate partition values used in the
    * archiving process. Naturally, prohibit the user from creating partitions
@@ -2129,7 +2163,7 @@
     for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
       Map<String, String> partSpec = i.next();
       List<Partition> parts = null;
-      if(allowMany) {
+      if (allowMany) {
         try {
           parts = db.getPartitions(tab, partSpec);
         } catch (HiveException e) {
@@ -2157,4 +2191,39 @@
     }
   }
 
+  /**
+   * Add the table partitions to be modified in the output, so that it is available for the
+   * pre-execution hook. If the partition does not exist, throw an error if
+   * throwIfNonExistent is true, otherwise ignore it.
+   */
+  private void addTableDropPartsOutputs(String tblName, List<PartitionSpec> partSpecs,
+      boolean throwIfNonExistent)
+      throws SemanticException {
+    Table tab;
+    try {
+      tab = db.getTable(tblName);
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
+    Iterator<PartitionSpec> i;
+    int index;
+    for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
+      PartitionSpec partSpec = i.next();
+      List<Partition> parts = null;
+      try {
+        parts = db.getPartitionsByFilter(tab, partSpec.toString());
+      } catch (Exception e) {
+        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+      }
+      if (parts.isEmpty()) {
+        if(throwIfNonExistent) {
+          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
+        }
+      }
+      for(Partition p: parts) {
+        outputs.add(new WriteEntity(p));
+      }
+    }
+  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1307617)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -117,6 +117,7 @@
 import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.PartitionSpec;
 import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
@@ -3132,7 +3133,8 @@
    * @throws HiveException
    *           Throws this exception if an unexpected error occurs.
    */
-  private int dropTable(Hive db, DropTableDesc dropTbl) throws HiveException {
+  private int dropTable(Hive db, DropTableDesc dropTbl)
+      throws HiveException {
     // We need to fetch the table before it is dropped so that it can be passed
     // to
     // post-execution hook
@@ -3195,13 +3197,18 @@
     }
 
     List<Partition> partsToDelete = new ArrayList<Partition>();
-    for (Map<String, String> partSpec : dropTbl.getPartSpecs()) {
-      List<Partition> partitions = db.getPartitions(tbl, partSpec);
+    for (PartitionSpec partSpec : dropTbl.getPartSpecs()) {
+      List<Partition> partitions = null;
+      try {
+        partitions = db.getPartitionsByFilter(tbl, partSpec.toString());
+      } catch (Exception e) {
+        throw new HiveException(e);
+      }
       // this is to prevent dropping archived partition which is archived in a
      // different level the drop command specified.
      int partPrefixToDrop = 0;
      for (FieldSchema fs : tbl.getPartCols()) {
-        if (partSpec.get(fs.getName()) != null) {
+        if (partSpec.existsKey(fs.getName())) {
          partPrefixToDrop += 1;
        } else {
          break;
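Both call sites above (DDLSemanticAnalyzer.addTableDropPartsOutputs and DDLTask.dropTable) resolve the affected partitions the same way: render the PartitionSpec as a metastore filter string and ask the metastore for the matching partitions. A minimal sketch of that shared step, assuming the surrounding Hive, Table, and Partition metadata objects are already in hand; the helper class and method names are invented and are not part of the patch:

    import java.util.List;

    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;
    import org.apache.hadoop.hive.ql.plan.PartitionSpec;

    public class DropByFilterSketch {
      // e.g. partSpec.toString() == "c >= 'US' AND d <= '2'"
      public static List<Partition> matchPartitions(Hive db, Table tbl, PartitionSpec partSpec)
          throws HiveException {
        try {
          return db.getPartitionsByFilter(tbl, partSpec.toString());
        } catch (Exception e) {
          // Mirror the patch: wrap metastore-side failures in a HiveException.
          throw new HiveException(e);
        }
      }
    }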