diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 1271799907..17016861f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2734,6 +2734,16 @@ void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
     case HiveParser.TOK_CHARSETLITERAL:
     case HiveParser.KW_TRUE:
     case HiveParser.KW_FALSE:
+    case HiveParser.TOK_INTERVAL_DAY_LITERAL:
+    case HiveParser.TOK_INTERVAL_DAY_TIME:
+    case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL:
+    case HiveParser.TOK_INTERVAL_HOUR_LITERAL:
+    case HiveParser.TOK_INTERVAL_MINUTE_LITERAL:
+    case HiveParser.TOK_INTERVAL_MONTH_LITERAL:
+    case HiveParser.TOK_INTERVAL_SECOND_LITERAL:
+    case HiveParser.TOK_INTERVAL_YEAR_LITERAL:
+    case HiveParser.TOK_INTERVAL_YEAR_MONTH:
+    case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL:
       break;
 
     case HiveParser.TOK_FUNCTION:
diff --git a/ql/src/test/queries/clientpositive/interval_3.q b/ql/src/test/queries/clientpositive/interval_3.q
index ee0f83cdee..f32793e2fa 100644
--- a/ql/src/test/queries/clientpositive/interval_3.q
+++ b/ql/src/test/queries/clientpositive/interval_3.q
@@ -38,3 +38,26 @@ from
 ) b
 on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey
 order by a.l_orderkey;
+
+-- interval literal in join condition
+create table date_dim_d1(
+  d_week_seq int,
+  d_date string);
+
+EXPLAIN SELECT
+  d1.d_week_seq
+FROM
+  date_dim_d1 d1
+  JOIN date_dim_d1 d3
+WHERE
+  Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+  + INTERVAL '1' year
+  + INTERVAL '2' month
+  + INTERVAL '5' day
+  + INTERVAL '4' hour
+  + INTERVAL '10' minute
+  + INTERVAL '9' second
+  AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH;
+
+DROP table date_dim_d1;
+
diff --git a/ql/src/test/results/clientpositive/interval_3.q.out b/ql/src/test/results/clientpositive/interval_3.q.out
index ac71514940..97eec124f0 100644
--- a/ql/src/test/results/clientpositive/interval_3.q.out
+++ b/ql/src/test/results/clientpositive/interval_3.q.out
@@ -96,3 +96,114 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@lineitem
 #### A masked pattern was here ####
 37	37	26 00:00:00.000000000
+PREHOOK: query: create table date_dim_d1(
+  d_week_seq int,
+  d_date string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@date_dim_d1
+POSTHOOK: query: create table date_dim_d1(
+  d_week_seq int,
+  d_date string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@date_dim_d1
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: EXPLAIN SELECT
+  d1.d_week_seq
+FROM
+  date_dim_d1 d1
+  JOIN date_dim_d1 d3
+WHERE
+  Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+  + INTERVAL '1' year
+  + INTERVAL '2' month
+  + INTERVAL '5' day
+  + INTERVAL '4' hour
+  + INTERVAL '10' minute
+  + INTERVAL '9' second
+  AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT
+  d1.d_week_seq
+FROM
+  date_dim_d1 d1
+  JOIN date_dim_d1 d3
+WHERE
+  Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+  + INTERVAL '1' year
+  + INTERVAL '2' month
+  + INTERVAL '5' day
+  + INTERVAL '4' hour
+  + INTERVAL '10' minute
+  + INTERVAL '9' second
+  AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: d1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: d_week_seq (type: int), d_date (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: string)
+          TableScan
+            alias: d3
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: d_date (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+          Filter Operator
+            predicate: ((CAST( CAST( _col2 AS DATE) AS TIMESTAMP) > ((((((CAST( _col1 AS DATE) + INTERVAL'1-0') + INTERVAL'0-2') + INTERVAL'5 00:00:00.000000000') + INTERVAL'0 04:00:00.000000000') + INTERVAL'0 00:10:00.000000000') + INTERVAL'0 00:00:09.000000000')) and (CAST( _col2 AS DATE) < (CAST( _col1 AS DATE) + INTERVAL'1-2'))) (type: boolean)
+            Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DROP table date_dim_d1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_dim_d1
+PREHOOK: Output: default@date_dim_d1
+POSTHOOK: query: DROP table date_dim_d1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_dim_d1
+POSTHOOK: Output: default@date_dim_d1