diff --git contrib/src/test/results/clientpositive/serde_typedbytes.q.out contrib/src/test/results/clientpositive/serde_typedbytes.q.out index 2cc10b1..8c22399 100644 --- contrib/src/test/results/clientpositive/serde_typedbytes.q.out +++ contrib/src/test/results/clientpositive/serde_typedbytes.q.out @@ -63,26 +63,19 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git contrib/src/test/results/clientpositive/serde_typedbytes5.q.out contrib/src/test/results/clientpositive/serde_typedbytes5.q.out index 0401db1..ece8e43 100644 --- contrib/src/test/results/clientpositive/serde_typedbytes5.q.out +++ contrib/src/test/results/clientpositive/serde_typedbytes5.q.out @@ -63,26 +63,19 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git hbase-handler/src/test/results/positive/hbase_queries.q.out hbase-handler/src/test/results/positive/hbase_queries.q.out index 34e0a01..ae961c4 100644 --- hbase-handler/src/test/results/positive/hbase_queries.q.out +++ hbase-handler/src/test/results/positive/hbase_queries.q.out @@ -54,26 +54,19 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat - output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat - serde: org.apache.hadoop.hive.hbase.HBaseSerDe - name: default.hbase_table_1 + File Output 
Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat + output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat + serde: org.apache.hadoop.hive.hbase.HBaseSerDe + name: default.hbase_table_1 PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0 @@ -646,26 +639,17 @@ STAGE PLANS: type: int expr: _col1 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat - output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat - serde: org.apache.hadoop.hive.hbase.HBaseSerDe - name: default.hbase_table_3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat + output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat + serde: org.apache.hadoop.hive.hbase.HBaseSerDe + name: default.hbase_table_3 PREHOOK: query: INSERT OVERWRITE TABLE hbase_table_3 diff --git ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out index cb5a156..7f47704 100644 --- ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out +++ ql/src/test/results/clientnegative/bucket_mapjoin_mismatch1.q.out @@ -216,22 +216,13 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientnegative/sortmerge_mapjoin_mismatch_1.q.out ql/src/test/results/clientnegative/sortmerge_mapjoin_mismatch_1.q.out index 35f03c3..7e26a76 100644 --- ql/src/test/results/clientnegative/sortmerge_mapjoin_mismatch_1.q.out +++ ql/src/test/results/clientnegative/sortmerge_mapjoin_mismatch_1.q.out @@ -115,24 +115,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work 
diff --git ql/src/test/results/clientpositive/alias_casted_column.q.out ql/src/test/results/clientpositive/alias_casted_column.q.out index 9cd7a1a..89853f9 100644 --- ql/src/test/results/clientpositive/alias_casted_column.q.out +++ ql/src/test/results/clientpositive/alias_casted_column.q.out @@ -23,17 +23,12 @@ STAGE PLANS: expr: UDFToInteger(key) type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -65,17 +60,12 @@ STAGE PLANS: expr: UDFToInteger(key) type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ambiguous_col.q.out ql/src/test/results/clientpositive/ambiguous_col.q.out index 7a877f3..d0c92b0 100644 --- ql/src/test/results/clientpositive/ambiguous_col.q.out +++ ql/src/test/results/clientpositive/ambiguous_col.q.out @@ -71,20 +71,13 @@ STAGE PLANS: type: string expr: _col1 type: string - outputColumnNames: _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -158,18 +151,13 @@ STAGE PLANS: expressions: expr: _col0 type: string - outputColumnNames: _col1 - Select Operator - expressions: - expr: _col1 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -243,18 +231,13 @@ STAGE PLANS: expressions: expr: _col0 type: string - outputColumnNames: _col1 - Select Operator - expressions: - expr: _col1 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/auto_join1.q.out ql/src/test/results/clientpositive/auto_join1.q.out index 71bae7c..58eaaf0 100644 --- ql/src/test/results/clientpositive/auto_join1.q.out +++ ql/src/test/results/clientpositive/auto_join1.q.out @@ -68,26 +68,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work @@ -144,26 +137,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work @@ -211,27 +197,19 @@ STAGE PLANS: outputColumnNames: _col0, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value diff --git 
ql/src/test/results/clientpositive/auto_join12.q.out ql/src/test/results/clientpositive/auto_join12.q.out index c647347..4e1913d 100644 --- ql/src/test/results/clientpositive/auto_join12.q.out +++ ql/src/test/results/clientpositive/auto_join12.q.out @@ -79,7 +79,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: @@ -106,7 +106,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -194,7 +194,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -217,7 +217,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: @@ -305,7 +305,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -357,7 +357,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: @@ -409,7 +409,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -459,7 +459,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: diff --git ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out index 8bada02..b75f26d 100644 --- ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out +++ ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out @@ -80,26 +80,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -164,26 +157,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -239,26 +225,19 @@ STAGE PLANS: outputColumnNames: _col0, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 diff --git ql/src/test/results/clientpositive/auto_join17.q.out ql/src/test/results/clientpositive/auto_join17.q.out index eb38e95..0606f7c 100644 --- ql/src/test/results/clientpositive/auto_join17.q.out +++ ql/src/test/results/clientpositive/auto_join17.q.out @@ -68,34 +68,23 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -152,34 +141,23 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -231,34 +209,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) diff --git ql/src/test/results/clientpositive/auto_join19.q.out ql/src/test/results/clientpositive/auto_join19.q.out index 3773708..269efe7 100644 --- ql/src/test/results/clientpositive/auto_join19.q.out +++ ql/src/test/results/clientpositive/auto_join19.q.out @@ -70,26 +70,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -146,26 +139,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
- name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -217,26 +203,19 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3, _col7 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) diff --git ql/src/test/results/clientpositive/auto_join2.q.out ql/src/test/results/clientpositive/auto_join2.q.out index bff2736..0cdf13d 100644 --- ql/src/test/results/clientpositive/auto_join2.q.out +++ ql/src/test/results/clientpositive/auto_join2.q.out @@ -122,26 +122,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j2 Local Work: Map Reduce Local Work @@ -196,26 +189,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j2 Local Work: Map Reduce Local Work @@ -261,26 
+247,19 @@ STAGE PLANS: outputColumnNames: _col4, _col9 Select Operator expressions: - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j2 Stage: Stage-15 Map Reduce Local Work diff --git ql/src/test/results/clientpositive/auto_join20.q.out ql/src/test/results/clientpositive/auto_join20.q.out index 573b283..727b7c5 100644 --- ql/src/test/results/clientpositive/auto_join20.q.out +++ ql/src/test/results/clientpositive/auto_join20.q.out @@ -396,7 +396,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 15.0)) + expr: ((key < 15.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -418,7 +418,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key < 15.0) and (key < 10.0)) + expr: ((key < 10.0) and (key < 15.0)) type: boolean HashTable Sink Operator condition expressions: @@ -585,7 +585,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 15.0)) + expr: ((key < 15.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -606,7 +606,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key < 15.0) and (key < 10.0)) + expr: ((key < 10.0) and (key < 15.0)) type: boolean Reduce Output Operator key expressions: diff --git ql/src/test/results/clientpositive/auto_join22.q.out ql/src/test/results/clientpositive/auto_join22.q.out index f4b9cf4..b40dc5c 100644 --- ql/src/test/results/clientpositive/auto_join22.q.out +++ ql/src/test/results/clientpositive/auto_join22.q.out @@ -125,23 +125,18 @@ STAGE PLANS: expr: _col7 type: string outputColumnNames: _col3 - Select Operator - expressions: - expr: _col3 - type: string - outputColumnNames: _col3 - Group By Operator - aggregations: - expr: sum(hash(_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -215,23 +210,18 @@ STAGE PLANS: expr: _col7 type: string outputColumnNames: _col3 - Select Operator - expressions: - expr: _col3 - type: string - outputColumnNames: _col3 - Group By Operator - aggregations: - expr: sum(hash(_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - 
table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -277,23 +267,18 @@ STAGE PLANS: expr: _col7 type: string outputColumnNames: _col3 - Select Operator - expressions: - expr: _col3 - type: string - outputColumnNames: _col3 - Group By Operator - aggregations: - expr: sum(hash(_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-15 Map Reduce Local Work diff --git ql/src/test/results/clientpositive/auto_join26.q.out ql/src/test/results/clientpositive/auto_join26.q.out index da773dd..2b52d57 100644 --- ql/src/test/results/clientpositive/auto_join26.q.out +++ ql/src/test/results/clientpositive/auto_join26.q.out @@ -118,26 +118,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/auto_join28.q.out ql/src/test/results/clientpositive/auto_join28.q.out index c0e0f27..5d8e3b7 100644 --- ql/src/test/results/clientpositive/auto_join28.q.out +++ ql/src/test/results/clientpositive/auto_join28.q.out @@ -224,7 +224,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: diff --git ql/src/test/results/clientpositive/auto_join29.q.out ql/src/test/results/clientpositive/auto_join29.q.out index 87498b2..3321335 100644 --- ql/src/test/results/clientpositive/auto_join29.q.out +++ ql/src/test/results/clientpositive/auto_join29.q.out @@ -2933,7 +2933,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) 
type: boolean HashTable Sink Operator condition expressions: @@ -3088,7 +3088,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -6370,7 +6370,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6388,7 +6388,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6410,7 +6410,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Map Join Operator condition map: @@ -6508,7 +6508,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6526,7 +6526,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6548,7 +6548,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean Map Join Operator condition map: @@ -6597,7 +6597,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -6618,7 +6618,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean Reduce Output Operator key expressions: @@ -6639,7 +6639,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -6737,7 +6737,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6759,7 +6759,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -6876,7 +6876,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -6897,7 +6897,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean Reduce Output Operator key expressions: @@ -7516,7 +7516,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -7538,7 +7538,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -7672,7 +7672,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: 
((key < 10.0) and (key > 10.0)) type: boolean Reduce Output Operator key expressions: @@ -7693,7 +7693,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -8254,7 +8254,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8272,7 +8272,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8294,7 +8294,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Map Join Operator condition map: @@ -8392,7 +8392,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8410,7 +8410,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8432,7 +8432,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean Map Join Operator condition map: @@ -8488,7 +8488,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8506,7 +8506,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -8528,7 +8528,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Map Join Operator condition map: @@ -8577,7 +8577,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -8598,7 +8598,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key > 10.0) and (key < 10.0)) + expr: ((key < 10.0) and (key > 10.0)) type: boolean Reduce Output Operator key expressions: @@ -8619,7 +8619,7 @@ STAGE PLANS: alias: src3 Filter Operator predicate: - expr: ((key < 10.0) and (key > 10.0)) + expr: ((key > 10.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: diff --git ql/src/test/results/clientpositive/auto_join3.q.out ql/src/test/results/clientpositive/auto_join3.q.out index 886a3f1..2de53eb 100644 --- ql/src/test/results/clientpositive/auto_join3.q.out +++ ql/src/test/results/clientpositive/auto_join3.q.out @@ -92,26 +92,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -190,26 +183,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -275,26 +261,19 @@ STAGE PLANS: Position of Big Table: 2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -356,26 +335,19 @@ STAGE PLANS: outputColumnNames: _col0, _col9 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key) diff --git ql/src/test/results/clientpositive/auto_join4.q.out ql/src/test/results/clientpositive/auto_join4.q.out index 
28ee1fe..b8650ea 100644 --- ql/src/test/results/clientpositive/auto_join4.q.out +++ ql/src/test/results/clientpositive/auto_join4.q.out @@ -110,45 +110,23 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -235,45 +213,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM ( diff --git ql/src/test/results/clientpositive/auto_join5.q.out ql/src/test/results/clientpositive/auto_join5.q.out index 2890c9b..473c489 100644 --- ql/src/test/results/clientpositive/auto_join5.q.out +++ ql/src/test/results/clientpositive/auto_join5.q.out @@ -110,45 +110,23 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - 
type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -235,45 +213,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM ( diff --git ql/src/test/results/clientpositive/auto_join6.q.out ql/src/test/results/clientpositive/auto_join6.q.out index 9dd4744..af3d09f 100644 --- ql/src/test/results/clientpositive/auto_join6.q.out +++ ql/src/test/results/clientpositive/auto_join6.q.out @@ -112,45 +112,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File 
Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/auto_join7.q.out ql/src/test/results/clientpositive/auto_join7.q.out index ba566b5..752d51d 100644 --- ql/src/test/results/clientpositive/auto_join7.q.out +++ ql/src/test/results/clientpositive/auto_join7.q.out @@ -152,57 +152,27 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - expr: UDFToInteger(_col4) - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/auto_join8.q.out ql/src/test/results/clientpositive/auto_join8.q.out index 9f49172..ba85f77 100644 --- ql/src/test/results/clientpositive/auto_join8.q.out +++ ql/src/test/results/clientpositive/auto_join8.q.out @@ -114,45 +114,23 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input 
format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -243,45 +221,23 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM ( diff --git ql/src/test/results/clientpositive/auto_join9.q.out ql/src/test/results/clientpositive/auto_join9.q.out index 74930df..bf358de 100644 --- ql/src/test/results/clientpositive/auto_join9.q.out +++ ql/src/test/results/clientpositive/auto_join9.q.out @@ -68,26 +68,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -144,26 +137,19 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -215,26 +201,19 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3, _col7 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) diff --git ql/src/test/results/clientpositive/auto_smb_mapjoin_14.q.out ql/src/test/results/clientpositive/auto_smb_mapjoin_14.q.out index 82952fe..ac89111 100644 --- ql/src/test/results/clientpositive/auto_smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/auto_smb_mapjoin_14.q.out @@ -76,19 +76,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -190,32 +189,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -227,19 +221,18 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: 
false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -382,32 +375,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -541,32 +529,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -817,35 +800,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -972,35 +950,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: 
_col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1631,36 +1604,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 - Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -2211,24 +2178,17 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out index faf1560..4d0b634 100644 --- ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out +++ ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out @@ -76,19 +76,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -188,32 +187,27 @@ STAGE PLANS: expr: 
_col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -355,32 +349,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -392,19 +381,18 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -552,32 +540,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -818,32 +801,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() 
+ bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1094,35 +1072,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1249,35 +1222,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -2145,36 +2113,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 - Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket 
Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/binarysortable_1.q.out ql/src/test/results/clientpositive/binarysortable_1.q.out index b454e93..7863964 100644 Binary files ql/src/test/results/clientpositive/binarysortable_1.q.out and ql/src/test/results/clientpositive/binarysortable_1.q.out differ diff --git ql/src/test/results/clientpositive/bucket_groupby.q.out ql/src/test/results/clientpositive/bucket_groupby.q.out index ebb9641..0ff90d1 100644 --- ql/src/test/results/clientpositive/bucket_groupby.q.out +++ ql/src/test/results/clientpositive/bucket_groupby.q.out @@ -614,32 +614,27 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/bucket_map_join_1.q.out ql/src/test/results/clientpositive/bucket_map_join_1.q.out index 7193da8..56131b0 100644 --- ql/src/test/results/clientpositive/bucket_map_join_1.q.out +++ ql/src/test/results/clientpositive/bucket_map_join_1.q.out @@ -95,19 +95,18 @@ STAGE PLANS: 1 [Column[key], Column[value]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucket_map_join_2.q.out ql/src/test/results/clientpositive/bucket_map_join_2.q.out index 7efa552..1e7bea5 100644 --- ql/src/test/results/clientpositive/bucket_map_join_2.q.out +++ ql/src/test/results/clientpositive/bucket_map_join_2.q.out @@ -95,19 +95,18 @@ STAGE PLANS: 1 [Column[key], Column[value]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: 
_col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketcontext_1.q.out ql/src/test/results/clientpositive/bucketcontext_1.q.out index 08d6910..43e34ce 100644 --- ql/src/test/results/clientpositive/bucketcontext_1.q.out +++ ql/src/test/results/clientpositive/bucketcontext_1.q.out @@ -134,19 +134,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -335,19 +334,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_2.q.out ql/src/test/results/clientpositive/bucketcontext_2.q.out index 295c53f..ab44de5 100644 --- ql/src/test/results/clientpositive/bucketcontext_2.q.out +++ ql/src/test/results/clientpositive/bucketcontext_2.q.out @@ -122,19 +122,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -323,19 +322,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_3.q.out ql/src/test/results/clientpositive/bucketcontext_3.q.out index 1ad3fb5..592765a 100644 --- ql/src/test/results/clientpositive/bucketcontext_3.q.out +++ ql/src/test/results/clientpositive/bucketcontext_3.q.out @@ -122,19 +122,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() 
- bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -274,19 +273,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_4.q.out ql/src/test/results/clientpositive/bucketcontext_4.q.out index a08fc2e..6fc94a7 100644 --- ql/src/test/results/clientpositive/bucketcontext_4.q.out +++ ql/src/test/results/clientpositive/bucketcontext_4.q.out @@ -134,19 +134,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -286,19 +285,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_5.q.out ql/src/test/results/clientpositive/bucketcontext_5.q.out index 6878a42..8eb9a71 100644 --- ql/src/test/results/clientpositive/bucketcontext_5.q.out +++ ql/src/test/results/clientpositive/bucketcontext_5.q.out @@ -107,19 +107,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -251,19 +250,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - 
bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_6.q.out ql/src/test/results/clientpositive/bucketcontext_6.q.out index 0e439e1..8271292 100644 --- ql/src/test/results/clientpositive/bucketcontext_6.q.out +++ ql/src/test/results/clientpositive/bucketcontext_6.q.out @@ -121,19 +121,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -320,19 +319,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_7.q.out ql/src/test/results/clientpositive/bucketcontext_7.q.out index 4b47e90..db9bb1d 100644 --- ql/src/test/results/clientpositive/bucketcontext_7.q.out +++ ql/src/test/results/clientpositive/bucketcontext_7.q.out @@ -147,19 +147,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -350,19 +349,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketcontext_8.q.out ql/src/test/results/clientpositive/bucketcontext_8.q.out index 7762413..21b5dc5 100644 --- 
ql/src/test/results/clientpositive/bucketcontext_8.q.out +++ ql/src/test/results/clientpositive/bucketcontext_8.q.out @@ -147,19 +147,18 @@ STAGE PLANS: Position of Big Table: 1 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -350,19 +349,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out index c892505..e4beebc 100644 --- ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out +++ ql/src/test/results/clientpositive/bucketizedhiveinputformat.q.out @@ -141,18 +141,13 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-3 Map Reduce diff --git ql/src/test/results/clientpositive/bucketmapjoin1.q.out ql/src/test/results/clientpositive/bucketmapjoin1.q.out index 91ded20..146cbd2 100644 --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out @@ -93,35 +93,24 @@ STAGE PLANS: type: string expr: _col6 type: string - expr: _col7 - type: string - outputColumnNames: _col0, _col1, _col6, _col7 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types int:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input 
format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types int:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -223,35 +212,24 @@ STAGE PLANS: type: string expr: _col6 type: string - expr: _col7 - type: string - outputColumnNames: _col0, _col1, _col6, _col7 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types int:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types int:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -423,42 +401,31 @@ STAGE PLANS: type: string expr: _col5 type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col5, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -924,47 +891,36 @@ STAGE PLANS: type: string expr: _col5 type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col5, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + rawDataSize 8519 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin10.q.out ql/src/test/results/clientpositive/bucketmapjoin10.q.out index 93bf6f7..3466e6d 100644 --- ql/src/test/results/clientpositive/bucketmapjoin10.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin10.q.out @@ -163,19 +163,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git 
ql/src/test/results/clientpositive/bucketmapjoin11.q.out ql/src/test/results/clientpositive/bucketmapjoin11.q.out index 8c74de1..1c12c09 100644 --- ql/src/test/results/clientpositive/bucketmapjoin11.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin11.q.out @@ -177,19 +177,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -418,19 +417,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin12.q.out ql/src/test/results/clientpositive/bucketmapjoin12.q.out index b87ac55..abf9783 100644 --- ql/src/test/results/clientpositive/bucketmapjoin12.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin12.q.out @@ -145,19 +145,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -330,19 +329,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin13.q.out ql/src/test/results/clientpositive/bucketmapjoin13.q.out index 381a19f..870cb35 100644 --- ql/src/test/results/clientpositive/bucketmapjoin13.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin13.q.out @@ -143,19 +143,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: 
count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -398,19 +397,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -617,19 +615,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -838,19 +835,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin2.q.out ql/src/test/results/clientpositive/bucketmapjoin2.q.out index 60cf832..7f3fb3e 100644 --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out @@ -144,40 +144,31 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### 
+ NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -650,45 +641,36 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 564 + rawDataSize 10503 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11067 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -1355,45 +1337,36 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 564 + rawDataSize 10503 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11067 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin3.q.out ql/src/test/results/clientpositive/bucketmapjoin3.q.out index a6e8753..913e925 100644 --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out @@ -161,40 +161,31 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -667,45 +658,36 @@ STAGE PLANS: type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 564 + rawDataSize 10503 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11067 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin4.q.out ql/src/test/results/clientpositive/bucketmapjoin4.q.out index cb1eae3..ae0be28 100644 --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out @@ -161,40 +161,31 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 
-#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -652,45 +643,36 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + 
rawDataSize 8519 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin5.q.out ql/src/test/results/clientpositive/bucketmapjoin5.q.out index 1371685..29746b4 100644 --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out @@ -199,40 +199,31 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -753,45 +744,36 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was 
here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 928 - rawDataSize 17038 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17966 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 928 + rawDataSize 17038 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 17966 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin7.q.out ql/src/test/results/clientpositive/bucketmapjoin7.q.out index 2da2ee2..b8ba7c0 100644 --- ql/src/test/results/clientpositive/bucketmapjoin7.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin7.q.out @@ -115,32 +115,25 @@ STAGE PLANS: type: int expr: _col7 type: string - outputColumnNames: _col0, _col7 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col7 - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types int:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin8.q.out ql/src/test/results/clientpositive/bucketmapjoin8.q.out index 4001106..2a5a5d5 100644 --- ql/src/test/results/clientpositive/bucketmapjoin8.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin8.q.out @@ -117,19 +117,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output 
Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -318,19 +317,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin9.q.out ql/src/test/results/clientpositive/bucketmapjoin9.q.out index 1eb2069..c2db270 100644 --- ql/src/test/results/clientpositive/bucketmapjoin9.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin9.q.out @@ -115,19 +115,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false @@ -337,19 +336,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out index 37f54fc..a68123b 100644 --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out @@ -119,42 +119,31 @@ STAGE PLANS: type: string expr: _col5 type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col5, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, 
string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out index 20e7546..37f4a48 100644 --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out @@ -134,40 +134,31 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: 
default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out index 5266561..2230fd1 100644 --- ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out +++ ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out @@ -179,35 +179,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -339,35 +328,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -493,35 +471,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: 
string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -645,35 +612,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -797,35 +753,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + 
NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -949,35 +894,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -1101,35 +1035,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -1253,35 +1176,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: 
string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -1405,35 +1317,24 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/column_access_stats.q.out ql/src/test/results/clientpositive/column_access_stats.q.out index 59d83c6..c02a2c6 100644 --- ql/src/test/results/clientpositive/column_access_stats.q.out +++ ql/src/test/results/clientpositive/column_access_stats.q.out @@ -68,20 +68,15 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + tag: -1 + value expressions: expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string Reduce Operator Tree: Extract File Output Operator @@ -130,20 +125,15 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: 
+ + tag: -1 + value expressions: expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string Reduce Operator Tree: Extract File Output Operator diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out index 3d71761..e1e8cea 100644 --- ql/src/test/results/clientpositive/create_view.q.out +++ ql/src/test/results/clientpositive/create_view.q.out @@ -202,19 +202,12 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/groupby1.q.out ql/src/test/results/clientpositive/groupby1.q.out index 8f3064e..73171ba 100644 --- ql/src/test/results/clientpositive/groupby1.q.out +++ ql/src/test/results/clientpositive/groupby1.q.out @@ -89,26 +89,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby10.q.out ql/src/test/results/clientpositive/groupby10.q.out index aa3a348..0d867e6 100644 --- ql/src/test/results/clientpositive/groupby10.q.out +++ ql/src/test/results/clientpositive/groupby10.q.out @@ -128,28 +128,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -197,28 +188,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: double - expr: _col2 - type: double + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -437,28 +419,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -506,28 +479,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: double - expr: _col2 - type: double + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -772,28 +736,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: double - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - 
type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -841,28 +796,19 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: double - expr: _col2 - type: double + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby11.q.out ql/src/test/results/clientpositive/groupby11.q.out index 7328494..07ecb34 100644 --- ql/src/test/results/clientpositive/groupby11.q.out +++ ql/src/test/results/clientpositive/groupby11.q.out @@ -123,28 +123,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -194,28 +185,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - 
input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby1_map.q.out ql/src/test/results/clientpositive/groupby1_map.q.out index b42a6e7..b15ac58 100644 --- ql/src/test/results/clientpositive/groupby1_map.q.out +++ ql/src/test/results/clientpositive/groupby1_map.q.out @@ -64,26 +64,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby1_map_nomap.q.out ql/src/test/results/clientpositive/groupby1_map_nomap.q.out index b42a6e7..b15ac58 100644 --- ql/src/test/results/clientpositive/groupby1_map_nomap.q.out +++ ql/src/test/results/clientpositive/groupby1_map_nomap.q.out @@ -64,26 +64,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby1_map_skew.q.out ql/src/test/results/clientpositive/groupby1_map_skew.q.out index 3573681..c51758a 100644 --- ql/src/test/results/clientpositive/groupby1_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby1_map_skew.q.out @@ -98,26 +98,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: 
UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby1_noskew.q.out ql/src/test/results/clientpositive/groupby1_noskew.q.out index 6e72ceb..896f72c 100644 --- ql/src/test/results/clientpositive/groupby1_noskew.q.out +++ ql/src/test/results/clientpositive/groupby1_noskew.q.out @@ -55,26 +55,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2.q.out ql/src/test/results/clientpositive/groupby2.q.out index 7047d46..86cebc1 100644 --- ql/src/test/results/clientpositive/groupby2.q.out +++ ql/src/test/results/clientpositive/groupby2.q.out @@ -98,28 +98,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2_map.q.out ql/src/test/results/clientpositive/groupby2_map.q.out index ec3148d..392463f 100644 --- ql/src/test/results/clientpositive/groupby2_map.q.out +++ ql/src/test/results/clientpositive/groupby2_map.q.out @@ -76,28 +76,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: 
UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2_map_multi_distinct.q.out ql/src/test/results/clientpositive/groupby2_map_multi_distinct.q.out index b587336..06dc53a 100644 --- ql/src/test/results/clientpositive/groupby2_map_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby2_map_multi_distinct.q.out @@ -84,36 +84,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2_map_skew.q.out ql/src/test/results/clientpositive/groupby2_map_skew.q.out index 608ae0c..6bd6ac2 100644 --- ql/src/test/results/clientpositive/groupby2_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby2_map_skew.q.out @@ -115,28 +115,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2_noskew.q.out ql/src/test/results/clientpositive/groupby2_noskew.q.out index fc3a481..9347270 100644 --- ql/src/test/results/clientpositive/groupby2_noskew.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew.q.out @@ -59,28 +59,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out index 49823fa..f697ea0 100644 --- ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby2_noskew_multi_distinct.q.out @@ -64,36 +64,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3.q.out ql/src/test/results/clientpositive/groupby3.q.out index f2da7de..bcb96eb 100644 --- ql/src/test/results/clientpositive/groupby3.q.out +++ ql/src/test/results/clientpositive/groupby3.q.out @@ -130,10 +130,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -143,35 +143,14 @@ 
STAGE PLANS: expr: _col8 type: double outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3_map.q.out ql/src/test/results/clientpositive/groupby3_map.q.out index 7506f51..8944f80 100644 --- ql/src/test/results/clientpositive/groupby3_map.q.out +++ ql/src/test/results/clientpositive/groupby3_map.q.out @@ -114,10 +114,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -127,35 +127,14 @@ STAGE PLANS: expr: _col8 type: double outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out index f255f8f..b212c90 100644 --- ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby3_map_multi_distinct.q.out @@ -126,10 +126,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -140,42 +140,17 @@ STAGE PLANS: type: double expr: _col9 type: double - expr: _col10 - type: bigint + expr: UDFToDouble(_col10) + type: double outputColumnNames: _col0, _col1, _col2, _col3, 
_col4, _col5, _col6, _col7, _col8, _col9, _col10 - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - expr: _col9 - type: double - expr: UDFToDouble(_col10) - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3_map_skew.q.out ql/src/test/results/clientpositive/groupby3_map_skew.q.out index 1a0964b..6e6aaf4 100644 --- ql/src/test/results/clientpositive/groupby3_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby3_map_skew.q.out @@ -166,10 +166,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -179,35 +179,14 @@ STAGE PLANS: expr: _col8 type: double outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3_noskew.q.out ql/src/test/results/clientpositive/groupby3_noskew.q.out index 060e8e1..2bd470b 100644 --- ql/src/test/results/clientpositive/groupby3_noskew.q.out +++ ql/src/test/results/clientpositive/groupby3_noskew.q.out @@ -78,10 +78,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -91,35 +91,14 @@ STAGE PLANS: expr: _col8 type: double outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Select Operator - expressions: - expr: _col0 - type: double - 
expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out index cf0a7c3..ae7caea 100644 --- ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby3_noskew_multi_distinct.q.out @@ -84,10 +84,10 @@ STAGE PLANS: type: double expr: _col2 type: double - expr: _col3 - type: string - expr: _col4 - type: string + expr: UDFToDouble(_col3) + type: double + expr: UDFToDouble(_col4) + type: double expr: _col5 type: double expr: _col6 @@ -98,42 +98,17 @@ STAGE PLANS: type: double expr: _col9 type: double - expr: _col10 - type: bigint + expr: UDFToDouble(_col10) + type: double outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: double - expr: _col2 - type: double - expr: UDFToDouble(_col3) - type: double - expr: UDFToDouble(_col4) - type: double - expr: _col5 - type: double - expr: _col6 - type: double - expr: _col7 - type: double - expr: _col8 - type: double - expr: _col9 - type: double - expr: UDFToDouble(_col10) - type: double - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby4_map.q.out ql/src/test/results/clientpositive/groupby4_map.q.out index 3099fd6..ffae1c2 100644 --- ql/src/test/results/clientpositive/groupby4_map.q.out +++ ql/src/test/results/clientpositive/groupby4_map.q.out @@ -46,22 +46,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: bigint + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby4_map_skew.q.out ql/src/test/results/clientpositive/groupby4_map_skew.q.out index 02d0d0d..a579a68 100644 --- ql/src/test/results/clientpositive/groupby4_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby4_map_skew.q.out @@ -46,22 +46,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: bigint + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby5.q.out ql/src/test/results/clientpositive/groupby5.q.out index bb2298c..5758b6e 100644 --- ql/src/test/results/clientpositive/groupby5.q.out +++ ql/src/test/results/clientpositive/groupby5.q.out @@ -95,26 +95,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby5_map.q.out ql/src/test/results/clientpositive/groupby5_map.q.out index e6275a7..b8ab7a3 100644 --- ql/src/test/results/clientpositive/groupby5_map.q.out +++ ql/src/test/results/clientpositive/groupby5_map.q.out @@ -50,22 +50,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: double + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby5_map_skew.q.out ql/src/test/results/clientpositive/groupby5_map_skew.q.out index f65771a..81f20e7 100644 --- ql/src/test/results/clientpositive/groupby5_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby5_map_skew.q.out @@ -50,22 +50,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: double + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby5_noskew.q.out ql/src/test/results/clientpositive/groupby5_noskew.q.out index f7f79d5..9bdf39e 100644 --- ql/src/test/results/clientpositive/groupby5_noskew.q.out +++ ql/src/test/results/clientpositive/groupby5_noskew.q.out @@ -61,26 +61,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby7_map.q.out ql/src/test/results/clientpositive/groupby7_map.q.out index 79c2f0d..a12e3fe 100644 --- ql/src/test/results/clientpositive/groupby7_map.q.out +++ ql/src/test/results/clientpositive/groupby7_map.q.out @@ -98,26 +98,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - 
input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -160,26 +153,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out index e06cd6d..d7e6040 100644 --- ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out +++ ql/src/test/results/clientpositive/groupby7_map_multi_single_reducer.q.out @@ -67,26 +67,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Group By Operator aggregations: expr: sum(VALUE._col0) @@ -98,26 +91,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby7_map_skew.q.out ql/src/test/results/clientpositive/groupby7_map_skew.q.out index c8afd38..cce662c 100644 --- ql/src/test/results/clientpositive/groupby7_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby7_map_skew.q.out @@ -133,26 +133,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -228,26 +221,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby7_noskew.q.out ql/src/test/results/clientpositive/groupby7_noskew.q.out index 23b516d..4c8f8de 100644 --- ql/src/test/results/clientpositive/groupby7_noskew.q.out +++ ql/src/test/results/clientpositive/groupby7_noskew.q.out @@ -80,26 +80,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -142,26 +135,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby8.q.out ql/src/test/results/clientpositive/groupby8.q.out index 274c9f4..aa5ead8 100644 --- ql/src/test/results/clientpositive/groupby8.q.out +++ ql/src/test/results/clientpositive/groupby8.q.out @@ -110,26 +110,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -172,26 +165,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -973,26 +959,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: 
bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -1035,26 +1014,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby8_map.q.out ql/src/test/results/clientpositive/groupby8_map.q.out index 0541259..2edda9f 100644 --- ql/src/test/results/clientpositive/groupby8_map.q.out +++ ql/src/test/results/clientpositive/groupby8_map.q.out @@ -110,26 +110,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -172,26 +165,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input 
format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby8_map_skew.q.out ql/src/test/results/clientpositive/groupby8_map_skew.q.out index 0541259..2edda9f 100644 --- ql/src/test/results/clientpositive/groupby8_map_skew.q.out +++ ql/src/test/results/clientpositive/groupby8_map_skew.q.out @@ -110,26 +110,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -172,26 +165,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby8_noskew.q.out ql/src/test/results/clientpositive/groupby8_noskew.q.out index 0541259..2edda9f 100644 --- ql/src/test/results/clientpositive/groupby8_noskew.q.out +++ ql/src/test/results/clientpositive/groupby8_noskew.q.out @@ -110,26 +110,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -172,26 +165,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby9.q.out ql/src/test/results/clientpositive/groupby9.q.out index 8748dc9..089b3c8 100644 --- ql/src/test/results/clientpositive/groupby9.q.out +++ ql/src/test/results/clientpositive/groupby9.q.out @@ -114,26 +114,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -182,30 +175,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -995,26 +979,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: 
bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -1063,30 +1040,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col1 - type: string + expr: UDFToInteger(_col1) + type: int expr: _col0 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -1896,26 +1864,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -1964,30 +1925,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 
2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -2803,26 +2755,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -2871,30 +2816,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -3744,26 +3680,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -3812,30 +3741,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col1 - type: string + expr: UDFToInteger(_col1) + type: int expr: _col0 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - 
expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby_cube1.q.out ql/src/test/results/clientpositive/groupby_cube1.q.out index 8cc2471..061a8bb 100644 --- ql/src/test/results/clientpositive/groupby_cube1.q.out +++ ql/src/test/results/clientpositive/groupby_cube1.q.out @@ -710,26 +710,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t2 Stage: Stage-0 Move Operator @@ -827,26 +818,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t3 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t3 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out index 4f8cf09..b35de8b 100644 --- ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out @@ -91,32 +91,25 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: - expr: _col0 - 
type: string - expr: _col1 - type: string - expr: '0' - type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + expr: '0' + type: string + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -281,30 +274,23 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: - expr: _col0 - type: string - expr: _col1 - type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + mode: hash + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce diff --git ql/src/test/results/clientpositive/groupby_map_ppr.q.out ql/src/test/results/clientpositive/groupby_map_ppr.q.out index ce1e14c..4058cbb 100644 --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out @@ -179,44 +179,35 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,c1,c2 - columns.types string:int:string + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,c1,c2 + columns.types string:int:string #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { string key, i32 c1, string c2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - 
TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /srcpart/ds=2008-04-08/hr=11 [src] /srcpart/ds=2008-04-08/hr=12 [src] diff --git ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out index 8d7bb7e..5402136 100644 --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out @@ -191,52 +191,39 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,c1,c2,c3,c4 - columns.types string:int:string:int:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,c1,c2,c3,c4 + columns.types string:int:string:int:int #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /srcpart/ds=2008-04-08/hr=11 [src] /srcpart/ds=2008-04-08/hr=12 [src] diff --git ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out index 7830697..779c48b 100644 --- ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out +++ ql/src/test/results/clientpositive/groupby_multi_single_reducer.q.out @@ -97,36 +97,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - 
Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Filter Operator predicate: expr: (KEY._col0 < 5.0) @@ -147,36 +134,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g3 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g3 Group By Operator aggregations: expr: count(DISTINCT KEY._col1:1._col0) @@ -193,36 +167,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g4 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g4 Stage: Stage-0 Move Operator @@ -498,36 +459,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + 
type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Filter Operator predicate: expr: (KEY._col0 < 5.0) @@ -548,36 +496,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g3 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g3 Group By Operator aggregations: expr: count(DISTINCT KEY._col1:1._col0) @@ -594,36 +529,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g4 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g4 Stage: Stage-0 Move Operator @@ -742,36 +664,23 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int expr: concat(_col0, _col3) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 5 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_h3 + File Output Operator + compressed: false + GlobalTableId: 5 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_h3 Stage: Stage-10 Map Reduce diff --git ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out index 06d47f9..896002e 100644 --- ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out +++ ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out @@ -79,24 +79,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g2 Filter Operator predicate: expr: (KEY._col0 < 5.0) @@ -115,28 +108,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_g3 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_g3 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby_mutli_insert_common_distinct.q.out ql/src/test/results/clientpositive/groupby_mutli_insert_common_distinct.q.out index 14acca0..fdf577d 100644 --- ql/src/test/results/clientpositive/groupby_mutli_insert_common_distinct.q.out +++ ql/src/test/results/clientpositive/groupby_mutli_insert_common_distinct.q.out @@ -112,26 +112,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -174,26 +167,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -365,26 +351,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -429,26 +408,19 @@ STAGE PLANS: 
outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby_position.q.out ql/src/test/results/clientpositive/groupby_position.q.out index 460b00f..535c5d8 100644 --- ql/src/test/results/clientpositive/groupby_position.q.out +++ ql/src/test/results/clientpositive/groupby_position.q.out @@ -118,26 +118,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.testtable1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.testtable1 Stage: Stage-0 Move Operator @@ -188,30 +181,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.testtable2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.testtable2 Stage: Stage-1 Move Operator @@ -409,26 +393,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: 
UDFToInteger(_col0) - type: int - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.testtable1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.testtable1 Stage: Stage-0 Move Operator @@ -479,30 +456,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col1 - type: string + expr: UDFToInteger(_col1) + type: int expr: _col0 type: string expr: _col2 type: bigint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: bigint - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.testtable2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.testtable2 Stage: Stage-1 Move Operator @@ -690,19 +658,12 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -951,23 +912,12 @@ STAGE PLANS: expr: _col4 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-3 Map Reduce diff --git ql/src/test/results/clientpositive/groupby_ppd.q.out ql/src/test/results/clientpositive/groupby_ppd.q.out index ef1a05c..2512b99 100644 --- ql/src/test/results/clientpositive/groupby_ppd.q.out +++ ql/src/test/results/clientpositive/groupby_ppd.q.out @@ -42,35 +42,28 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: 
_col0 - type: int + Group By Operator + bucketGroup: false + keys: expr: _col1 type: int + expr: _col0 + type: int + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: + expr: _col0 + type: int expr: _col1 type: int + sort order: ++ + Map-reduce partition columns: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: int - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: int - expr: _col1 - type: int - tag: -1 + expr: _col1 + type: int + tag: -1 a-subquery2:b-subquery2:d TableScan alias: d @@ -93,35 +86,28 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int + Group By Operator + bucketGroup: false + keys: expr: _col1 type: int + expr: _col0 + type: int + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: + expr: _col0 + type: int expr: _col1 type: int + sort order: ++ + Map-reduce partition columns: expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: int - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: int - expr: _col1 - type: int - tag: -1 + expr: _col1 + type: int + tag: -1 Reduce Operator Tree: Group By Operator bucketGroup: false diff --git ql/src/test/results/clientpositive/groupby_ppr.q.out ql/src/test/results/clientpositive/groupby_ppr.q.out index 8879e3c..0b1fb51 100644 --- ql/src/test/results/clientpositive/groupby_ppr.q.out +++ ql/src/test/results/clientpositive/groupby_ppr.q.out @@ -162,44 +162,35 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,c1,c2 - columns.types string:int:string + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,c1,c2 + columns.types string:int:string #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { string key, i32 c1, string c2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { string key, i32 c1, string c2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /srcpart/ds=2008-04-08/hr=11 [src] /srcpart/ds=2008-04-08/hr=12 [src] diff --git ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out index 83fb64c..e4fa6c5 100644 --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out @@ -166,52 +166,39 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int expr: concat(_col0, _col2) type: string - expr: _col3 - type: double - expr: _col4 - type: bigint + expr: UDFToInteger(_col3) + type: int + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,c1,c2,c3,c4 - columns.types string:int:string:int:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,c1,c2,c3,c4 + columns.types string:int:string:int:int #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /srcpart/ds=2008-04-08/hr=11 [src] /srcpart/ds=2008-04-08/hr=12 [src] diff --git ql/src/test/results/clientpositive/groupby_rollup1.q.out ql/src/test/results/clientpositive/groupby_rollup1.q.out index bf4e0e4..9c3947f 100644 --- ql/src/test/results/clientpositive/groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/groupby_rollup1.q.out @@ -698,26 +698,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t2 Stage: Stage-0 Move Operator @@ -815,26 +806,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t3 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t3 Stage: Stage-1 Move Operator diff --git ql/src/test/results/clientpositive/groupby_sort_1.q.out ql/src/test/results/clientpositive/groupby_sort_1.q.out index b839451..e6f3a7a 100644 --- ql/src/test/results/clientpositive/groupby_sort_1.q.out +++ ql/src/test/results/clientpositive/groupby_sort_1.q.out @@ -83,42 +83,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + 
MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -514,46 +507,37 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:string:int -#### A masked pattern was here #### - name default.outputtbl2 - serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl2 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:string:int +#### A masked pattern was here #### + name default.outputtbl2 + serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl2 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [t1] @@ -665,63 +649,51 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: final + mode: final + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int #### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -1070,63 +1042,51 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: final + mode: final + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int #### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: 
false Path -> Alias: #### A masked pattern was here #### @@ -1514,44 +1474,35 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:int:int -#### A masked pattern was here #### - name default.outputtbl3 - serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:int:int +#### A masked pattern was here #### + name default.outputtbl3 + serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -1995,50 +1946,39 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output 
Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [t1] @@ -2269,51 +2209,42 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: double - expr: _col2 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:int:int -#### A masked pattern was here #### - name default.outputtbl3 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:int:int +#### A masked pattern was here #### + name default.outputtbl3 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 25 + serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 30 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [t1] @@ -2492,34 +2423,27 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + Group By Operator + aggregations: + expr: sum(_col1) + bucketGroup: false + keys: + expr: (_col0 + _col0) + type: double + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - 
aggregations: - expr: sum(_col1) - bucketGroup: false - keys: - expr: (_col0 + _col0) + Reduce Output Operator + key expressions: + expr: _col0 type: double - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: double - sort order: + - Map-reduce partition columns: - expr: _col0 - type: double - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: double + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -2582,47 +2506,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [subq1:t1] @@ -2817,47 +2734,40 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int 
-#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 17 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 22 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery2:subq1-subquery2:t1 TableScan alias: t1 @@ -2886,47 +2796,40 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 17 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 22 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ 
-3460,47 +3363,40 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 30 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 40 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery1:subq1-subquery1:t1 TableScan alias: t1 @@ -3521,62 +3417,48 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToDouble(_col0) + type: double expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Union + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 30 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 40 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -4140,47 +4022,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col3 Select Operator expressions: - expr: _col0 - type: string - expr: (_col1 + _col3) - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger((_col1 + _col3)) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 32 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 42 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [subq1:t1, subq2:t1] @@ -4884,47 +4759,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: 
_col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t2 [t2] @@ -5137,55 +5005,44 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked 
pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -5682,54 +5539,41 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string expr: _col3 type: int - expr: _col4 - type: bigint + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,key4,cnt - columns.types int:int:string:int:int -#### A masked pattern was here #### - name default.outputtbl5 - serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl5 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,key4,cnt + columns.types int:int:string:int:int +#### A masked pattern was here #### + name default.outputtbl5 + serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl5 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -6153,79 +5997,59 @@ STAGE PLANS: expr: val type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string expr: _col1 type: int expr: _col2 
type: string - outputColumnNames: _col0, _col1, _col2 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string + mode: final + outputColumnNames: _col0, _col1, _col2, _col3 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - mode: final + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int #### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -6699,91 +6523,64 @@ STAGE PLANS: expressions: expr: key type: string + expr: 2 + type: int expr: val type: string - outputColumnNames: _col0, _col2 - Select Operator - expressions: + outputColumnNames: _col0, _col3, _col2 + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string + expr: _col3 + type: int expr: _col2 type: string - expr: 2 - type: int - outputColumnNames: _col0, _col2, _col3 + mode: final + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string - expr: _col3 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: int expr: _col2 type: string - outputColumnNames: _col0, _col3, _col2 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - expr: _col3 - type: int - expr: _col2 - type: string - mode: final - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: 
_col0 - type: string - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: UDFToInteger(_col3) + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -7413,30 +7210,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Reduce Operator Tree: Group By Operator aggregations: @@ -7449,26 +7237,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - 
type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -7844,30 +7625,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Reduce Operator Tree: Group By Operator aggregations: @@ -7880,26 +7652,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby_sort_2.q.out ql/src/test/results/clientpositive/groupby_sort_2.q.out index 89b4fc0..c20b757 100644 --- ql/src/test/results/clientpositive/groupby_sort_2.q.out +++ ql/src/test/results/clientpositive/groupby_sort_2.q.out @@ -99,24 +99,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: 
UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby_sort_3.q.out ql/src/test/results/clientpositive/groupby_sort_3.q.out index 3eacf67..7a6a809 100644 --- ql/src/test/results/clientpositive/groupby_sort_3.q.out +++ ql/src/test/results/clientpositive/groupby_sort_3.q.out @@ -86,26 +86,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-7 Conditional Operator @@ -259,24 +250,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl2 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/groupby_sort_4.q.out ql/src/test/results/clientpositive/groupby_sort_4.q.out index 1b32f78..95ff90a 100644 --- ql/src/test/results/clientpositive/groupby_sort_4.q.out +++ ql/src/test/results/clientpositive/groupby_sort_4.q.out @@ -99,24 +99,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-0 Move Operator @@ -255,26 +248,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby_sort_5.q.out ql/src/test/results/clientpositive/groupby_sort_5.q.out index 533f361..23cfec3 100644 --- ql/src/test/results/clientpositive/groupby_sort_5.q.out +++ ql/src/test/results/clientpositive/groupby_sort_5.q.out @@ -90,26 +90,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-7 Conditional Operator @@ -318,26 +309,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-7 Conditional Operator @@ -611,24 +593,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/groupby_sort_6.q.out ql/src/test/results/clientpositive/groupby_sort_6.q.out index 54125d0..49ea4ac 100644 --- ql/src/test/results/clientpositive/groupby_sort_6.q.out +++ ql/src/test/results/clientpositive/groupby_sort_6.q.out @@ -78,42 +78,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Stage: Stage-0 Move Operator @@ -245,47 +238,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) 
+ type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Stage: Stage-0 Move Operator @@ -464,47 +450,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + 
bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1/ds=2 [t1] diff --git ql/src/test/results/clientpositive/groupby_sort_7.q.out ql/src/test/results/clientpositive/groupby_sort_7.q.out index 100bb05..1c061ae 100644 --- ql/src/test/results/clientpositive/groupby_sort_7.q.out +++ ql/src/test/results/clientpositive/groupby_sort_7.q.out @@ -93,26 +93,17 @@ STAGE PLANS: type: string expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out index d970008..b7ca0ee 100644 --- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out +++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out @@ -83,42 +83,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -583,46 +576,37 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:string:int -#### A masked pattern was here #### - name default.outputtbl2 - serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl2 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:string:int +#### A masked pattern was here #### + name default.outputtbl2 + serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl2 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: #### A masked pattern was here #### @@ -734,63 +718,51 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: final + mode: final + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: 
UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int #### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -1139,63 +1111,51 @@ STAGE PLANS: expr: key type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: final + mode: final + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int #### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 + name default.outputtbl1 + numFiles 1 + 
numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -1583,44 +1543,35 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:int:int -#### A masked pattern was here #### - name default.outputtbl3 - serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:int:int +#### A masked pattern was here #### + name default.outputtbl3 + serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -2137,50 +2088,39 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: #### A masked pattern was here #### @@ -2480,51 +2420,42 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: double - expr: _col2 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,cnt - columns.types int:int:int -#### A masked pattern was here #### - name default.outputtbl3 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 25 - serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,cnt + columns.types int:int:int +#### A masked pattern was here #### + name default.outputtbl3 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 25 + serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} + serialization.format 1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 30 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: #### A masked pattern was here #### @@ -2704,34 +2635,27 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + Group By Operator + aggregations: + expr: sum(_col1) + bucketGroup: false + keys: + expr: (_col0 + _col0) + type: double + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: sum(_col1) - bucketGroup: false - keys: - expr: (_col0 + _col0) + Reduce Output Operator + key expressions: + expr: _col0 type: double - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: double - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -2858,47 +2782,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: #### A masked pattern was here #### @@ -3093,47 +3010,40 @@ STAGE PLANS: Union Select Operator 
expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 17 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 22 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery2:subq1-subquery2:t1 TableScan alias: t1 @@ -3162,47 +3072,40 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 17 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 22 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 17 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 22 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -3801,47 +3704,40 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 30 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 40 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery1:subq1-subquery1:t1 TableScan alias: t1 @@ -3862,62 +3758,48 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToDouble(_col0) + type: double expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: double - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output 
Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 30 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 40 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Union + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 30 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 40 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -4481,47 +4363,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col3 Select Operator expressions: - expr: _col0 - type: string - expr: (_col1 + _col3) - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger((_col1 + _col3)) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 10 - rawDataSize 32 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 42 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + 
columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 10 + rawDataSize 32 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 42 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /t1 [subq1:t1, subq2:t1] @@ -5359,47 +5234,40 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,cnt - columns.types int:int -#### A masked pattern was here #### - name default.outputtbl1 - numFiles 1 - numPartitions 0 - numRows 5 - rawDataSize 15 - serialization.ddl struct outputtbl1 { i32 key, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 20 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,cnt + columns.types int:int +#### A masked pattern was here #### + name default.outputtbl1 + numFiles 1 + numPartitions 0 + numRows 5 + rawDataSize 15 + serialization.ddl struct outputtbl1 { i32 key, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 20 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: #### A masked pattern was here #### @@ -5612,55 +5480,44 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -6157,54 +6014,41 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string expr: _col3 type: int - expr: _col4 - type: bigint + expr: UDFToInteger(_col4) + type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: int - expr: UDFToInteger(_col4) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,key4,cnt - columns.types int:int:string:int:int -#### A masked pattern was here #### - name default.outputtbl5 - serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl5 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,key4,cnt + columns.types int:int:string:int:int +#### A masked pattern was here #### + name 
default.outputtbl5 + serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl5 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -6628,79 +6472,59 @@ STAGE PLANS: expr: val type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string expr: _col1 type: int expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string + mode: final + outputColumnNames: _col0, _col1, _col2, _col3 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int expr: _col1 type: int expr: _col2 type: string - mode: final + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int #### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -7174,91 +6998,64 @@ STAGE PLANS: expressions: expr: key type: string + expr: 2 + type: int expr: val type: string - outputColumnNames: _col0, _col2 - Select Operator - expressions: + outputColumnNames: _col0, _col3, _col2 + Group By 
Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string + expr: _col3 + type: int expr: _col2 type: string - expr: 2 - type: int - outputColumnNames: _col0, _col2, _col3 + mode: final + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string - expr: _col3 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: int expr: _col2 type: string - outputColumnNames: _col0, _col3, _col2 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: _col0 - type: string - expr: _col3 - type: int - expr: _col2 - type: string - mode: final - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: bigint - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,key2,key3,cnt - columns.types int:int:string:int -#### A masked pattern was here #### - name default.outputtbl4 - numFiles 1 - numPartitions 0 - numRows 6 - rawDataSize 48 - serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 54 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.outputtbl4 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: UDFToInteger(_col3) + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key1,key2,key3,cnt + columns.types int:int:string:int +#### A masked pattern was here #### + name default.outputtbl4 + numFiles 1 + numPartitions 0 + numRows 6 + rawDataSize 48 + serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 54 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.outputtbl4 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -7889,30 +7686,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, 
_col2 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Reduce Operator Tree: Group By Operator aggregations: @@ -7958,26 +7746,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -8354,30 +8135,21 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: true - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: true + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Reduce Operator Tree: Group By Operator aggregations: @@ -8423,26 +8195,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: true - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: true + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/index_auto_mult_tables.q.out ql/src/test/results/clientpositive/index_auto_mult_tables.q.out index 63912ac..4af46e7 100644 --- ql/src/test/results/clientpositive/index_auto_mult_tables.q.out +++ ql/src/test/results/clientpositive/index_auto_mult_tables.q.out @@ -278,44 +278,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: @@ -461,44 +445,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: expr: _col0 type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/index_auto_self_join.q.out ql/src/test/results/clientpositive/index_auto_self_join.q.out index 62b256b..492008c 100644 --- ql/src/test/results/clientpositive/index_auto_self_join.q.out +++ ql/src/test/results/clientpositive/index_auto_self_join.q.out @@ -185,44 +185,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - 
expressions: + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: @@ -366,44 +350,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: expr: _col0 type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out index 5281795..a014e39 100644 --- ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out +++ ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out @@ -68,44 +68,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: expr: _col0 type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/index_bitmap_compression.q.out ql/src/test/results/clientpositive/index_bitmap_compression.q.out index 53d5fa9..abb0b09 100644 --- ql/src/test/results/clientpositive/index_bitmap_compression.q.out +++ 
ql/src/test/results/clientpositive/index_bitmap_compression.q.out @@ -54,44 +54,28 @@ STAGE PLANS: type: string expr: _offset type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: expr: _col0 type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out index a3296ec..15380f5 100644 --- ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out +++ ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out @@ -673,35 +673,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - expr: if(((_col0 % 100) = 0), '11', '12') - type: string - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToString(_col1) - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat - output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat - serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - name: default.test_table + expr: UDFToString(_col1) + type: string + expr: if(((_col0 % 100) = 0), '11', '12') + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: default.test_table Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out index 1c9124b..cf4f32d 100644 --- ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out +++ ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out @@ -492,22 +492,15 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test_table_out + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test_table_out Stage: Stage-7 Conditional Operator @@ -688,32 +681,27 @@ STAGE PLANS: expr: _col5 type: string outputColumnNames: _col5 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col5 type: string - outputColumnNames: _col5 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: - expr: _col5 + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out index 6b845bd..7cf4081 100644 --- ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out +++ ql/src/test/results/clientpositive/infer_bucket_sort_num_buckets.q.out @@ -66,31 +66,22 @@ STAGE PLANS: type: int expr: value type: string - expr: (key % 2) - type: double + expr: UDFToInteger((key % 2)) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string - expr: UDFToInteger(_col2) + expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: int Reduce Operator Tree: Extract File Output Operator diff --git ql/src/test/results/clientpositive/input11.q.out ql/src/test/results/clientpositive/input11.q.out index fbd2cec..a55c05d 100644 --- ql/src/test/results/clientpositive/input11.q.out +++ ql/src/test/results/clientpositive/input11.q.out @@ -37,26 +37,19 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input12_hadoop20.q.out ql/src/test/results/clientpositive/input12_hadoop20.q.out index b071f90..c69d52e 100644 --- ql/src/test/results/clientpositive/input12_hadoop20.q.out +++ ql/src/test/results/clientpositive/input12_hadoop20.q.out @@ -69,74 +69,55 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Filter Operator predicate: expr: ((key >= 100.0) and (key < 200.0)) type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Filter Operator predicate: expr: (key >= 200.0) type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest3 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest3 Stage: Stage-9 Conditional Operator diff --git ql/src/test/results/clientpositive/input13.q.out ql/src/test/results/clientpositive/input13.q.out index 76b9097..6c3f1c4 100644 --- ql/src/test/results/clientpositive/input13.q.out +++ ql/src/test/results/clientpositive/input13.q.out @@ -73,74 +73,55 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string 
outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Filter Operator predicate: expr: ((key >= 100.0) and (key < 200.0)) type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Filter Operator predicate: expr: ((key >= 200.0) and (key < 300.0)) type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest3 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest3 Filter Operator predicate: expr: (key >= 300.0) diff --git ql/src/test/results/clientpositive/input14.q.out ql/src/test/results/clientpositive/input14.q.out index 1b3fab3..8dfa426 100644 --- ql/src/test/results/clientpositive/input14.q.out +++ ql/src/test/results/clientpositive/input14.q.out @@ -70,26 +70,19 @@ STAGE PLANS: Extract Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input14_limit.q.out ql/src/test/results/clientpositive/input14_limit.q.out index eb92384..bbe5d1b 100644 --- ql/src/test/results/clientpositive/input14_limit.q.out +++ ql/src/test/results/clientpositive/input14_limit.q.out @@ -100,26 +100,19 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input17.q.out ql/src/test/results/clientpositive/input17.q.out index e31a3e6..db02dfa 100644 --- ql/src/test/results/clientpositive/input17.q.out +++ ql/src/test/results/clientpositive/input17.q.out @@ -66,26 +66,19 @@ STAGE PLANS: Extract Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input18.q.out ql/src/test/results/clientpositive/input18.q.out index 51722cc..855f053 100644 --- ql/src/test/results/clientpositive/input18.q.out +++ ql/src/test/results/clientpositive/input18.q.out @@ -74,26 +74,19 @@ STAGE PLANS: Extract Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: regexp_replace(_col1, ' ', '+') type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + 
GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input22.q.out ql/src/test/results/clientpositive/input22.q.out index 73e0339..be211ca 100644 --- ql/src/test/results/clientpositive/input22.q.out +++ ql/src/test/results/clientpositive/input22.q.out @@ -39,21 +39,16 @@ STAGE PLANS: expressions: expr: key type: string - outputColumnNames: _col2 - Select Operator - expressions: - expr: _col2 + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string Reduce Operator Tree: Extract Limit diff --git ql/src/test/results/clientpositive/input30.q.out ql/src/test/results/clientpositive/input30.q.out index c1bcabd..a20557a 100644 --- ql/src/test/results/clientpositive/input30.q.out +++ ql/src/test/results/clientpositive/input30.q.out @@ -57,22 +57,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: bigint + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tst_dest30 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tst_dest30 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input31.q.out ql/src/test/results/clientpositive/input31.q.out index 9f0ba72..60627bb 100644 --- ql/src/test/results/clientpositive/input31.q.out +++ ql/src/test/results/clientpositive/input31.q.out @@ -57,22 +57,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: bigint + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tst_dest31 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tst_dest31 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input32.q.out ql/src/test/results/clientpositive/input32.q.out index bb47d89..5fabec9 100644 --- ql/src/test/results/clientpositive/input32.q.out +++ 
ql/src/test/results/clientpositive/input32.q.out @@ -53,22 +53,17 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: _col0 - type: bigint + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tst_dest32 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tst_dest32 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/input34.q.out ql/src/test/results/clientpositive/input34.q.out index 8c462ad..876b6ca 100644 --- ql/src/test/results/clientpositive/input34.q.out +++ ql/src/test/results/clientpositive/input34.q.out @@ -55,26 +55,19 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input35.q.out ql/src/test/results/clientpositive/input35.q.out index c3c8ede..9a0ea31 100644 --- ql/src/test/results/clientpositive/input35.q.out +++ ql/src/test/results/clientpositive/input35.q.out @@ -55,26 +55,19 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input36.q.out ql/src/test/results/clientpositive/input36.q.out 
index 961d0ba..a275ccb 100644 --- ql/src/test/results/clientpositive/input36.q.out +++ ql/src/test/results/clientpositive/input36.q.out @@ -55,26 +55,19 @@ STAGE PLANS: output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input7.q.out ql/src/test/results/clientpositive/input7.q.out index 93a295a..ae89e27 100644 --- ql/src/test/results/clientpositive/input7.q.out +++ ql/src/test/results/clientpositive/input7.q.out @@ -33,26 +33,19 @@ STAGE PLANS: alias: src1 Select Operator expressions: - expr: null - type: string - expr: key - type: string + expr: UDFToDouble(null) + type: double + expr: UDFToInteger(key) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input8.q.out ql/src/test/results/clientpositive/input8.q.out index ccf995b..1b4ef58 100644 --- ql/src/test/results/clientpositive/input8.q.out +++ ql/src/test/results/clientpositive/input8.q.out @@ -35,28 +35,19 @@ STAGE PLANS: expressions: expr: (4 + null) type: int - expr: (key - null) + expr: UDFToInteger((key - null)) + type: int + expr: UDFToDouble((null + null)) type: double - expr: (null + null) - type: tinyint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToDouble(_col2) - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input9.q.out ql/src/test/results/clientpositive/input9.q.out index 76d2747..fa55649 100644 --- ql/src/test/results/clientpositive/input9.q.out +++ ql/src/test/results/clientpositive/input9.q.out @@ -39,24 +39,17 @@ STAGE PLANS: expressions: expr: null type: string - expr: key - type: string + expr: UDFToInteger(key) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: void - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/input_part1.q.out ql/src/test/results/clientpositive/input_part1.q.out index fa24c53..a8222d4 100644 --- ql/src/test/results/clientpositive/input_part1.q.out +++ ql/src/test/results/clientpositive/input_part1.q.out @@ -39,8 +39,8 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: hr @@ -48,41 +48,30 @@ STAGE PLANS: expr: ds type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,hr,ds - columns.types int:string:string:string -#### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,hr,ds + columns.types int:string:string:string +#### A masked pattern was here #### + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here 
#### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/input_part2.q.out ql/src/test/results/clientpositive/input_part2.q.out index 1a40058..7f8dd01 100644 --- ql/src/test/results/clientpositive/input_part2.q.out +++ ql/src/test/results/clientpositive/input_part2.q.out @@ -53,8 +53,8 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: hr @@ -62,41 +62,30 @@ STAGE PLANS: expr: ds type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,hr,ds - columns.types int:string:string:string -#### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,hr,ds + columns.types int:string:string:string +#### A masked pattern was here #### + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Filter Operator isSamplingPred: false predicate: @@ -104,8 +93,8 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: hr @@ -113,41 +102,30 @@ STAGE PLANS: expr: ds type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 2 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,hr,ds - columns.types int:string:string:string -#### A masked pattern was 
here #### - name default.dest2 - serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 2 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,hr,ds + columns.types int:string:string:string +#### A masked pattern was here #### + name default.dest2 + serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/input_testsequencefile.q.out ql/src/test/results/clientpositive/input_testsequencefile.q.out index e800641..da790fa 100644 --- ql/src/test/results/clientpositive/input_testsequencefile.q.out +++ ql/src/test/results/clientpositive/input_testsequencefile.q.out @@ -33,26 +33,19 @@ STAGE PLANS: alias: src Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest4_sequencefile + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest4_sequencefile Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/join1.q.out ql/src/test/results/clientpositive/join1.q.out index 2f63dd3..88b5a11 100644 --- ql/src/test/results/clientpositive/join1.q.out +++ ql/src/test/results/clientpositive/join1.q.out @@ -64,26 +64,19 @@ STAGE PLANS: outputColumnNames: _col0, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join12.q.out ql/src/test/results/clientpositive/join12.q.out index 6307d01..0e30e8b 100644 --- ql/src/test/results/clientpositive/join12.q.out +++ ql/src/test/results/clientpositive/join12.q.out @@ -36,7 +36,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -86,7 +86,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: diff --git ql/src/test/results/clientpositive/join14_hadoop20.q.out ql/src/test/results/clientpositive/join14_hadoop20.q.out index ea0e274..01f63df 100644 --- ql/src/test/results/clientpositive/join14_hadoop20.q.out +++ ql/src/test/results/clientpositive/join14_hadoop20.q.out @@ -76,26 +76,19 @@ STAGE PLANS: outputColumnNames: _col0, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join17.q.out ql/src/test/results/clientpositive/join17.q.out index 01f282b..a20acaf 100644 --- ql/src/test/results/clientpositive/join17.q.out +++ ql/src/test/results/clientpositive/join17.q.out @@ -116,50 +116,39 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key1,value1,key2,value2 - columns.types int:string:int:string + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + 
bucket_count -1 + columns key1,value1,key2,value2 + columns.types int:string:int:string #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /src [src2, src1] diff --git ql/src/test/results/clientpositive/join2.q.out ql/src/test/results/clientpositive/join2.q.out index d9e6a96..21be94c 100644 --- ql/src/test/results/clientpositive/join2.q.out +++ ql/src/test/results/clientpositive/join2.q.out @@ -112,26 +112,19 @@ STAGE PLANS: outputColumnNames: _col4, _col9 Select Operator expressions: - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j2 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join20.q.out ql/src/test/results/clientpositive/join20.q.out index a735d36..c458e25 100644 --- ql/src/test/results/clientpositive/join20.q.out +++ ql/src/test/results/clientpositive/join20.q.out @@ -744,7 +744,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 15.0)) + expr: ((key < 15.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -765,7 +765,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key < 15.0) and (key < 10.0)) + expr: ((key < 10.0) and (key < 15.0)) type: boolean Reduce Output Operator key expressions: diff --git ql/src/test/results/clientpositive/join22.q.out ql/src/test/results/clientpositive/join22.q.out index 606d735..c205cd3 100644 --- ql/src/test/results/clientpositive/join22.q.out +++ ql/src/test/results/clientpositive/join22.q.out @@ -109,18 +109,13 @@ STAGE PLANS: expressions: expr: _col7 type: string - outputColumnNames: _col3 - Select Operator - expressions: - expr: _col3 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/join25.q.out ql/src/test/results/clientpositive/join25.q.out index 33b0065..44aebbe 100644 --- ql/src/test/results/clientpositive/join25.q.out +++ ql/src/test/results/clientpositive/join25.q.out @@ -68,39 +68,21 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join26.q.out ql/src/test/results/clientpositive/join26.q.out index 80b373a..ecdbda6 100644 --- ql/src/test/results/clientpositive/join26.q.out +++ ql/src/test/results/clientpositive/join26.q.out @@ -97,44 +97,35 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col5 - type: string expr: _col9 type: string - outputColumnNames: _col0, _col5, _col9 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + 
serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/join27.q.out ql/src/test/results/clientpositive/join27.q.out index 048f474..04ddbb6 100644 --- ql/src/test/results/clientpositive/join27.q.out +++ ql/src/test/results/clientpositive/join27.q.out @@ -68,39 +68,21 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join29.q.out ql/src/test/results/clientpositive/join29.q.out index 086e334..6f08274 100644 --- ql/src/test/results/clientpositive/join29.q.out +++ ql/src/test/results/clientpositive/join29.q.out @@ -132,28 +132,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col3 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work @@ -208,28 +199,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col3 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - 
compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work @@ -277,28 +259,19 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - expr: _col3 - type: bigint + expr: UDFToInteger(_col1) + type: int + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-4 Map Reduce diff --git ql/src/test/results/clientpositive/join3.q.out ql/src/test/results/clientpositive/join3.q.out index f8ccebf..f4aab0a 100644 --- ql/src/test/results/clientpositive/join3.q.out +++ ql/src/test/results/clientpositive/join3.q.out @@ -78,26 +78,19 @@ STAGE PLANS: outputColumnNames: _col0, _col9 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col9 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join30.q.out ql/src/test/results/clientpositive/join30.q.out index 9164035..468c7ba 100644 --- ql/src/test/results/clientpositive/join30.q.out +++ ql/src/test/results/clientpositive/join30.q.out @@ -64,32 +64,27 @@ STAGE PLANS: expr: _col0 type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - 
Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -104,26 +99,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join31.q.out ql/src/test/results/clientpositive/join31.q.out index 763f2c5..ac3abca 100644 --- ql/src/test/results/clientpositive/join31.q.out +++ ql/src/test/results/clientpositive/join31.q.out @@ -182,24 +182,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join35.q.out ql/src/test/results/clientpositive/join35.q.out index 630e710..7074628 100644 --- ql/src/test/results/clientpositive/join35.q.out +++ ql/src/test/results/clientpositive/join35.q.out @@ -211,42 +211,33 @@ STAGE PLANS: type: string expr: _col3 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types 
string:string:int -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, i32 val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:int +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, i32 val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false #### A masked pattern was here #### TableScan GatherStats: false @@ -269,42 +260,33 @@ STAGE PLANS: type: string expr: _col3 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:int -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, i32 val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:int +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, i32 val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: true @@ -476,42 +458,33 @@ STAGE PLANS: type: string expr: _col3 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string 
- expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:int -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, i32 val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:int +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, i32 val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: true @@ -741,42 +714,33 @@ STAGE PLANS: type: string expr: _col3 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:int -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, i32 val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:int +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, i32 val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
+#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /src1 [x] #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/join36.q.out ql/src/test/results/clientpositive/join36.q.out index 986beef..da3d6c1 100644 --- ql/src/test/results/clientpositive/join36.q.out +++ ql/src/test/results/clientpositive/join36.q.out @@ -114,24 +114,15 @@ STAGE PLANS: type: int expr: _col5 type: int - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col5 - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join37.q.out ql/src/test/results/clientpositive/join37.q.out index 5d46406..2074beb 100644 --- ql/src/test/results/clientpositive/join37.q.out +++ ql/src/test/results/clientpositive/join37.q.out @@ -68,39 +68,21 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join38.q.out ql/src/test/results/clientpositive/join38.q.out index 4e2494b..187bf13 100644 --- ql/src/test/results/clientpositive/join38.q.out +++ ql/src/test/results/clientpositive/join38.q.out @@ -130,43 +130,34 @@ STAGE PLANS: type: string expr: _col9 type: string - expr: _col15 - type: string - outputColumnNames: _col1, _col9, _col15 - Select Operator - expressions: + outputColumnNames: _col1, _col9 + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col1 type: string expr: _col9 type: string - outputColumnNames: _col1, _col9 - Group By Operator - aggregations: - expr: count(1) - 
bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string expr: _col1 type: string - expr: _col9 + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + tag: -1 + value expressions: + expr: _col2 + type: bigint Local Work: Map Reduce Local Work Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/join39.q.out ql/src/test/results/clientpositive/join39.q.out index 374da7d..adc3487 100644 --- ql/src/test/results/clientpositive/join39.q.out +++ ql/src/test/results/clientpositive/join39.q.out @@ -87,26 +87,15 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join4.q.out ql/src/test/results/clientpositive/join4.q.out index 6e18925..563246b 100644 --- ql/src/test/results/clientpositive/join4.q.out +++ ql/src/test/results/clientpositive/join4.q.out @@ -112,45 +112,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff 
--git ql/src/test/results/clientpositive/join40.q.out ql/src/test/results/clientpositive/join40.q.out index c6f0d39..7e733ed 100644 --- ql/src/test/results/clientpositive/join40.q.out +++ ql/src/test/results/clientpositive/join40.q.out @@ -2527,7 +2527,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 15.0)) + expr: ((key < 15.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: @@ -2548,7 +2548,7 @@ STAGE PLANS: alias: src2 Filter Operator predicate: - expr: ((key < 15.0) and (key < 10.0)) + expr: ((key < 10.0) and (key < 15.0)) type: boolean Reduce Output Operator key expressions: @@ -3299,24 +3299,13 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/join5.q.out ql/src/test/results/clientpositive/join5.q.out index 22c778a..01397e1 100644 --- ql/src/test/results/clientpositive/join5.q.out +++ ql/src/test/results/clientpositive/join5.q.out @@ -112,45 +112,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join6.q.out ql/src/test/results/clientpositive/join6.q.out index 8bb6d74..77deb1f 100644 --- ql/src/test/results/clientpositive/join6.q.out +++ ql/src/test/results/clientpositive/join6.q.out @@ -112,45 +112,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: 
UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join7.q.out ql/src/test/results/clientpositive/join7.q.out index f508507..76f219d 100644 --- ql/src/test/results/clientpositive/join7.q.out +++ ql/src/test/results/clientpositive/join7.q.out @@ -152,57 +152,27 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string - expr: _col4 - type: string + expr: UDFToInteger(_col4) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - expr: UDFToInteger(_col4) - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join8.q.out ql/src/test/results/clientpositive/join8.q.out index 97b1e92..20319f8 100644 --- ql/src/test/results/clientpositive/join8.q.out +++ ql/src/test/results/clientpositive/join8.q.out @@ -116,45 +116,23 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select 
Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/join9.q.out ql/src/test/results/clientpositive/join9.q.out index f25d7d5..5b931f5 100644 --- ql/src/test/results/clientpositive/join9.q.out +++ ql/src/test/results/clientpositive/join9.q.out @@ -162,42 +162,35 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3, _col7 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col7 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 + File Output Operator + compressed: false + GlobalTableId: 1 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string #### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe #### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Truncated Path -> Alias: /src [src2] /srcpart/ds=2008-04-08/hr=12 [src1] diff --git ql/src/test/results/clientpositive/join_map_ppr.q.out ql/src/test/results/clientpositive/join_map_ppr.q.out index f5a0f42..92f845e 100644 --- ql/src/test/results/clientpositive/join_map_ppr.q.out +++ ql/src/test/results/clientpositive/join_map_ppr.q.out @@ -99,48 +99,35 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col5 - type: string expr: _col9 type: string - expr: _col10 - type: string - expr: _col11 + expr: _col5 type: string - 
outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string -#### A masked pattern was here #### - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string +#### A masked pattern was here #### + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -660,53 +647,40 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col5 - type: string expr: _col9 type: string - expr: _col10 - type: string - expr: _col11 + expr: _col5 type: string - outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string -#### A masked pattern was here #### - name default.dest_j1 - numFiles 1 - numPartitions 0 - numRows 107 - rawDataSize 2018 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2125 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string +#### A masked pattern was here #### + name default.dest_j1 + numFiles 1 + numPartitions 0 + numRows 107 + rawDataSize 2018 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2125 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/join_view.q.out ql/src/test/results/clientpositive/join_view.q.out index 192c414..0e3c8b5 100644 --- ql/src/test/results/clientpositive/join_view.q.out +++ ql/src/test/results/clientpositive/join_view.q.out @@ -100,21 +100,12 @@ STAGE PLANS: expr: _col7 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/lateral_view_ppd.q.out ql/src/test/results/clientpositive/lateral_view_ppd.q.out index c97a297..bdb82c6 100644 --- ql/src/test/results/clientpositive/lateral_view_ppd.q.out +++ ql/src/test/results/clientpositive/lateral_view_ppd.q.out @@ -27,26 +27,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -58,26 +49,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: 
Stage-0 Fetch Operator @@ -134,26 +116,17 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -169,26 +142,17 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -240,27 +204,16 @@ STAGE PLANS: expressions: expr: _col1 type: string - expr: _col2 - type: string - expr: _col3 - type: string expr: _col4 type: int - outputColumnNames: _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -274,27 +227,16 @@ STAGE PLANS: expressions: expr: _col1 type: string - expr: _col2 - type: string - expr: _col3 - type: string expr: _col4 type: int - outputColumnNames: _col1, _col2, _col3, _col4 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -357,26 +299,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - 
expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -388,26 +321,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -424,26 +348,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -455,26 +370,17 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string expr: _col1 type: string expr: _col2 type: int - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/lineage1.q.out ql/src/test/results/clientpositive/lineage1.q.out index bdd69d7..9efbdf6 100644 --- ql/src/test/results/clientpositive/lineage1.q.out +++ ql/src/test/results/clientpositive/lineage1.q.out @@ -109,51 +109,37 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - 
type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_l1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_l1 #### A masked pattern was here #### TableScan Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_l1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_l1 Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/mapjoin1.q.out ql/src/test/results/clientpositive/mapjoin1.q.out index 8452ba6..7643ab9 100644 --- ql/src/test/results/clientpositive/mapjoin1.q.out +++ ql/src/test/results/clientpositive/mapjoin1.q.out @@ -92,25 +92,14 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work @@ -208,25 +197,14 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work @@ -333,25 +311,14 @@ STAGE PLANS: type: string expr: 
_col5 type: struct - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: struct - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work @@ -451,25 +418,14 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work @@ -567,25 +523,14 @@ STAGE PLANS: type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work @@ -688,25 +633,14 @@ STAGE PLANS: type: string expr: _col5 type: struct - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: struct - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/mapjoin_distinct.q.out ql/src/test/results/clientpositive/mapjoin_distinct.q.out index 26da883..9fbecee 100644 --- ql/src/test/results/clientpositive/mapjoin_distinct.q.out +++ ql/src/test/results/clientpositive/mapjoin_distinct.q.out @@ -63,27 +63,22 @@ STAGE 
PLANS: expr: _col1 type: string outputColumnNames: _col1 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col1 type: string - outputColumnNames: _col1 - Group By Operator - bucketGroup: false - keys: - expr: _col1 + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -231,27 +226,22 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col1 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col1 type: string - outputColumnNames: _col1 - Group By Operator - bucketGroup: false - keys: - expr: _col1 + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 + tag: -1 Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -372,20 +362,15 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: expr: _col1 type: string - outputColumnNames: _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -533,20 +518,15 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: expr: _col1 type: string - outputColumnNames: _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 Local Work: Map Reduce Local Work Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out index ad3f415..60a3350 100644 --- ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out +++ ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out @@ -76,7 +76,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 300.0)) + expr: ((key < 300.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -159,45 +159,30 @@ STAGE PLANS: type: string expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Reduce Output Operator + key expressions: expr: _col0 type: string - expr: _col1 + expr: _col2 type: string expr: _col4 type: string - expr: _col5 + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 type: string - expr: _col8 + expr: 
_col3 type: string - expr: _col9 + expr: _col4 + type: string + expr: _col5 type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col2 - type: string - expr: _col4 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col4 - type: string - expr: _col5 - type: string Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -296,7 +281,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 300.0)) + expr: ((key < 300.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -440,7 +425,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 300.0)) + expr: ((key < 300.0) and (key < 10.0)) type: boolean HashTable Sink Operator condition expressions: @@ -541,7 +526,7 @@ STAGE PLANS: alias: src1 Filter Operator predicate: - expr: ((key < 10.0) and (key < 300.0)) + expr: ((key < 300.0) and (key < 10.0)) type: boolean Reduce Output Operator key expressions: diff --git ql/src/test/results/clientpositive/merge1.q.out ql/src/test/results/clientpositive/merge1.q.out index 6522d1a..c557114 100644 --- ql/src/test/results/clientpositive/merge1.q.out +++ ql/src/test/results/clientpositive/merge1.q.out @@ -69,26 +69,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/merge2.q.out ql/src/test/results/clientpositive/merge2.q.out index 252017f..c5c4118 100644 --- ql/src/test/results/clientpositive/merge2.q.out +++ ql/src/test/results/clientpositive/merge2.q.out @@ -69,26 +69,19 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/multigroupby_singlemr.q.out ql/src/test/results/clientpositive/multigroupby_singlemr.q.out index 5161c75..f8f75b5 100644 --- ql/src/test/results/clientpositive/multigroupby_singlemr.q.out +++ ql/src/test/results/clientpositive/multigroupby_singlemr.q.out @@ -119,24 +119,17 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -189,26 +182,17 @@ STAGE PLANS: type: int expr: _col1 type: int - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -320,24 +304,17 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator @@ -390,26 +367,17 @@ STAGE PLANS: type: int expr: _col0 type: int - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: 
_col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -545,28 +513,17 @@ STAGE PLANS: type: int expr: _col2 type: int - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest3 Stage: Stage-0 Move Operator @@ -619,26 +576,17 @@ STAGE PLANS: type: int expr: _col1 type: int - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -735,28 +683,17 @@ STAGE PLANS: type: int expr: _col2 type: int - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest3 Group By Operator aggregations: expr: count(VALUE._col0) 
@@ -778,28 +715,17 @@ STAGE PLANS: type: int expr: _col1 type: int - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest4 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest4 Stage: Stage-0 Move Operator @@ -975,28 +901,17 @@ STAGE PLANS: type: int expr: _col2 type: int - expr: _col3 - type: bigint + expr: UDFToInteger(_col3) + type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - expr: UDFToInteger(_col3) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest3 Stage: Stage-0 Move Operator @@ -1049,26 +964,17 @@ STAGE PLANS: type: int expr: _col1 type: int - expr: _col2 - type: bigint + expr: UDFToInteger(_col2) + type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: UDFToInteger(_col2) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest2 Stage: Stage-1 Move Operator @@ -1113,24 +1019,17 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + 
GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-2 Move Operator diff --git ql/src/test/results/clientpositive/noalias_subq1.q.out ql/src/test/results/clientpositive/noalias_subq1.q.out index b51f68c..94460fd 100644 --- ql/src/test/results/clientpositive/noalias_subq1.q.out +++ ql/src/test/results/clientpositive/noalias_subq1.q.out @@ -26,20 +26,13 @@ STAGE PLANS: expressions: expr: value type: string - expr: key - type: string - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/nonmr_fetch.q.out ql/src/test/results/clientpositive/nonmr_fetch.q.out index 92f7a66..e808d25 100644 --- ql/src/test/results/clientpositive/nonmr_fetch.q.out +++ ql/src/test/results/clientpositive/nonmr_fetch.q.out @@ -1240,19 +1240,12 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/notable_alias1.q.out ql/src/test/results/clientpositive/notable_alias1.q.out index db89c1e..01c4876 100644 --- ql/src/test/results/clientpositive/notable_alias1.q.out +++ ql/src/test/results/clientpositive/notable_alias1.q.out @@ -70,28 +70,19 @@ STAGE PLANS: expressions: expr: '1234' type: string - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToDouble(_col1) + type: double outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToDouble(_col2) - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/notable_alias2.q.out 
ql/src/test/results/clientpositive/notable_alias2.q.out index e144b72..8f4905d 100644 --- ql/src/test/results/clientpositive/notable_alias2.q.out +++ ql/src/test/results/clientpositive/notable_alias2.q.out @@ -70,28 +70,19 @@ STAGE PLANS: expressions: expr: '1234' type: string - expr: _col0 - type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col0) + type: int + expr: UDFToDouble(_col1) + type: double outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - expr: UDFToDouble(_col2) - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/parallel.q.out ql/src/test/results/clientpositive/parallel.q.out index 696150f..61c5415 100644 --- ql/src/test/results/clientpositive/parallel.q.out +++ ql/src/test/results/clientpositive/parallel.q.out @@ -82,19 +82,12 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-3 Map Reduce diff --git ql/src/test/results/clientpositive/ppd1.q.out ql/src/test/results/clientpositive/ppd1.q.out index 20e77e2..369efb8 100644 --- ql/src/test/results/clientpositive/ppd1.q.out +++ ql/src/test/results/clientpositive/ppd1.q.out @@ -22,21 +22,17 @@ STAGE PLANS: predicate: expr: (key > '2') type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ppd2.q.out ql/src/test/results/clientpositive/ppd2.q.out index 65ba807..dc391c6 100644 --- ql/src/test/results/clientpositive/ppd2.q.out +++ ql/src/test/results/clientpositive/ppd2.q.out @@ -86,19 +86,12 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: 
bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -588,19 +581,12 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce diff --git ql/src/test/results/clientpositive/ppd_clusterby.q.out ql/src/test/results/clientpositive/ppd_clusterby.q.out index 80e373e..919f626 100644 --- ql/src/test/results/clientpositive/ppd_clusterby.q.out +++ ql/src/test/results/clientpositive/ppd_clusterby.q.out @@ -22,31 +22,27 @@ STAGE PLANS: predicate: expr: (key = 10.0) type: boolean - Filter Operator - predicate: - expr: (key = 10.0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator diff --git ql/src/test/results/clientpositive/ppd_constant_expr.q.out ql/src/test/results/clientpositive/ppd_constant_expr.q.out index 0ad8163..9078bb9 100644 --- ql/src/test/results/clientpositive/ppd_constant_expr.q.out +++ ql/src/test/results/clientpositive/ppd_constant_expr.q.out @@ -35,28 +35,19 @@ STAGE PLANS: expressions: expr: (4 + null) type: int - expr: (key - null) + expr: UDFToInteger((key - null)) + type: int + expr: UDFToDouble((null + null)) type: double - expr: (null + null) - type: tinyint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToDouble(_col2) - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.ppd_constant_expr + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.ppd_constant_expr Stage: Stage-7 Conditional Operator @@ -197,28 +188,19 @@ STAGE PLANS: expressions: expr: (4 + null) type: int - expr: (key - null) + expr: UDFToInteger((key - null)) + type: int + expr: UDFToDouble((null + null)) type: double - expr: (null + null) - type: tinyint outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: UDFToInteger(_col1) - type: int - expr: UDFToDouble(_col2) - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.ppd_constant_expr + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.ppd_constant_expr Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/ppd_gby.q.out ql/src/test/results/clientpositive/ppd_gby.q.out index e865128..d60cc64 100644 --- ql/src/test/results/clientpositive/ppd_gby.q.out +++ ql/src/test/results/clientpositive/ppd_gby.q.out @@ -28,38 +28,34 @@ STAGE PLANS: predicate: expr: ((value > 'val_10') and (value > 'val_200')) type: boolean - Filter Operator - predicate: - expr: (value > 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: expr: value type: string - expr: key - type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(key) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -317,20 +313,13 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ppd_gby2.q.out ql/src/test/results/clientpositive/ppd_gby2.q.out index ba4875b..59d2c84 100644 --- ql/src/test/results/clientpositive/ppd_gby2.q.out +++ 
ql/src/test/results/clientpositive/ppd_gby2.q.out @@ -31,38 +31,34 @@ STAGE PLANS: predicate: expr: ((value > 'val_10') and (value > 'val_200')) type: boolean - Filter Operator - predicate: - expr: (value > 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: expr: value type: string - expr: key - type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(key) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -255,28 +251,21 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string + Group By Operator + aggregations: + expr: max(_col0) + bucketGroup: false + keys: expr: _col1 type: bigint + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: max(_col0) - bucketGroup: false - keys: - expr: _col1 - type: bigint - mode: hash - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce diff --git ql/src/test/results/clientpositive/ppd_gby_join.q.out ql/src/test/results/clientpositive/ppd_gby_join.q.out index eb21387..8351f88 100644 --- ql/src/test/results/clientpositive/ppd_gby_join.q.out +++ ql/src/test/results/clientpositive/ppd_gby_join.q.out @@ -37,35 +37,31 @@ STAGE PLANS: predicate: expr: (((((key > '1') and (key < '400')) and (key > '20')) and ((value < 'val_50') or (key > '2'))) and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key > '1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -73,31 +69,27 
@@ STAGE PLANS: predicate: expr: ((((key > '2') and (key < '400')) and (key <> '4')) and (key > '20')) type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: diff --git ql/src/test/results/clientpositive/ppd_join.q.out ql/src/test/results/clientpositive/ppd_join.q.out index 2dc4486..f53be31 100644 --- ql/src/test/results/clientpositive/ppd_join.q.out +++ ql/src/test/results/clientpositive/ppd_join.q.out @@ -34,35 +34,31 @@ STAGE PLANS: predicate: expr: (((((key > '1') and (key < '400')) and (key > '20')) and ((value < 'val_50') or (key > '2'))) and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key > '1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -70,35 +66,31 @@ STAGE PLANS: predicate: expr: ((((key > '2') and (key < '400')) and (key <> '4')) and (key > '20')) type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: diff --git ql/src/test/results/clientpositive/ppd_join2.q.out 
ql/src/test/results/clientpositive/ppd_join2.q.out index aabd394..304e749 100644 --- ql/src/test/results/clientpositive/ppd_join2.q.out +++ ql/src/test/results/clientpositive/ppd_join2.q.out @@ -41,35 +41,31 @@ STAGE PLANS: predicate: expr: (((((key <> '302') and (key < '400')) and (key <> '311')) and ((value <> 'val_50') or (key > '1'))) and (key <> '14')) type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -77,35 +73,31 @@ STAGE PLANS: predicate: expr: ((((key <> '305') and (key < '400')) and (key <> '14')) and (key <> '311')) type: boolean - Filter Operator - predicate: - expr: (key <> '305') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -151,29 +143,25 @@ STAGE PLANS: predicate: expr: ((key <> '306') and (sqrt(key) <> 13)) type: boolean - Filter Operator - predicate: - expr: (key <> '306') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: 1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string Reduce Operator Tree: Join Operator condition map: diff --git ql/src/test/results/clientpositive/ppd_join3.q.out ql/src/test/results/clientpositive/ppd_join3.q.out index 68bc1dd..763c54f 100644 --- ql/src/test/results/clientpositive/ppd_join3.q.out +++ 
ql/src/test/results/clientpositive/ppd_join3.q.out @@ -40,35 +40,31 @@ STAGE PLANS: predicate: expr: ((((((key <> '11') and (key < '400')) and (key > '0')) and ((value <> 'val_500') or (key > '1'))) and (key <> '4')) and (key <> '1')) type: boolean - Filter Operator - predicate: - expr: (key <> '11') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -76,35 +72,31 @@ STAGE PLANS: predicate: expr: (((((key <> '12') and (key < '400')) and (key <> '4')) and (key > '0')) and (key <> '1')) type: boolean - Filter Operator - predicate: - expr: (key <> '12') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src3:src TableScan alias: src @@ -112,31 +104,27 @@ STAGE PLANS: predicate: expr: (((((key <> '13') and (key < '400')) and (key <> '1')) and (key > '0')) and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key <> '13') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Filter Operator + predicate: + expr: (_col0 < '400') + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: diff --git ql/src/test/results/clientpositive/ppd_multi_insert.q.out ql/src/test/results/clientpositive/ppd_multi_insert.q.out index 851ddff..a0a444b 100644 --- 
ql/src/test/results/clientpositive/ppd_multi_insert.q.out +++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out @@ -88,74 +88,55 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi1 Filter Operator predicate: expr: ((_col0 >= 100.0) and (_col0 < 200.0)) type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi2 Filter Operator predicate: expr: ((_col0 >= 200.0) and (_col0 < 300.0)) type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi3 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi3 Filter Operator predicate: expr: (_col0 >= 300.0) @@ -1395,74 +1376,55 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi1 + File Output Operator + 
compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi1 Filter Operator predicate: expr: ((_col0 >= 100.0) and (_col0 < 200.0)) type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi2 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi2 Filter Operator predicate: expr: ((_col0 >= 200.0) and (_col0 < 300.0)) type: boolean Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 3 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.mi3 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi3 Filter Operator predicate: expr: (_col0 >= 300.0) diff --git ql/src/test/results/clientpositive/ppd_random.q.out ql/src/test/results/clientpositive/ppd_random.q.out index de2af20..46a3dd8 100644 --- ql/src/test/results/clientpositive/ppd_random.q.out +++ ql/src/test/results/clientpositive/ppd_random.q.out @@ -54,29 +54,25 @@ STAGE PLANS: predicate: expr: (key > '2') type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - expr: value + tag: 1 + value expressions: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: diff --git ql/src/test/results/clientpositive/ppd_repeated_alias.q.out ql/src/test/results/clientpositive/ppd_repeated_alias.q.out index ae36745..959415e 100644 --- ql/src/test/results/clientpositive/ppd_repeated_alias.q.out +++ ql/src/test/results/clientpositive/ppd_repeated_alias.q.out @@ -188,21 +188,12 @@ STAGE PLANS: expr: _col6 type: 
int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -290,21 +281,12 @@ STAGE PLANS: expr: _col1 type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ppd_udf_col.q.out ql/src/test/results/clientpositive/ppd_udf_col.q.out index 4ff573e..7caacee 100644 --- ql/src/test/results/clientpositive/ppd_udf_col.q.out +++ ql/src/test/results/clientpositive/ppd_udf_col.q.out @@ -26,34 +26,30 @@ STAGE PLANS: predicate: expr: (key = 100.0) type: boolean - Filter Operator - predicate: - expr: (key = 100.0) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: rand() - type: double - outputColumnNames: _col0, _col2 - Filter Operator - predicate: - expr: (_col2 <= 0.1) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col2 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: rand() + type: double + outputColumnNames: _col0, _col2 + Filter Operator + predicate: + expr: (_col2 <= 0.1) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col2 + type: double + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -92,46 +88,42 @@ STAGE PLANS: predicate: expr: (key = 100.0) type: boolean - Filter Operator - predicate: - expr: (key = 100.0) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: rand() - type: double - outputColumnNames: _col0, _col2 - Filter Operator - predicate: - expr: (_col2 <= 0.1) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col2 - type: double - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col1 > 0.1) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input 
format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: rand() + type: double + outputColumnNames: _col0, _col2 + Filter Operator + predicate: + expr: (_col2 <= 0.1) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col2 + type: double + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col1 > 0.1) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: double + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -166,38 +158,34 @@ STAGE PLANS: predicate: expr: (key = 100.0) type: boolean - Filter Operator - predicate: - expr: (key = 100.0) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: rand() - type: double - expr: hex(4) - type: string - outputColumnNames: _col0, _col2, _col3 - Filter Operator - predicate: - expr: (_col3 <= 3.0) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col2 - type: double - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: rand() + type: double + expr: hex(4) + type: string + outputColumnNames: _col0, _col2, _col3 + Filter Operator + predicate: + expr: (_col3 <= 3.0) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col2 + type: double + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -232,38 +220,34 @@ STAGE PLANS: predicate: expr: (key = 100.0) type: boolean - Filter Operator - predicate: - expr: (key = 100.0) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: rand() - type: double - expr: (value * 10) - type: double - outputColumnNames: _col0, _col2, _col3 - Filter Operator - predicate: - expr: (_col3 <= 200.0) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col2 - type: double - expr: _col3 - type: double - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: rand() + type: double + expr: (value * 10) + type: double + outputColumnNames: _col0, _col2, _col3 + Filter Operator + predicate: + expr: (_col3 <= 200.0) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col2 + type: double + expr: _col3 + type: double + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -378,20 +362,13 @@ STAGE PLANS: expr: _col2 type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ppd_union.q.out ql/src/test/results/clientpositive/ppd_union.q.out index d97e9d9..1881145 100644 --- ql/src/test/results/clientpositive/ppd_union.q.out +++ ql/src/test/results/clientpositive/ppd_union.q.out @@ -34,35 +34,31 @@ STAGE PLANS: predicate: expr: (((key < '100') and (key > '4')) and (value > 'val_4')) type: boolean - Filter Operator - predicate: - expr: (key < '100') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Filter Operator - predicate: - expr: ((_col0 > '4') and (_col1 > 'val_4')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Filter Operator + predicate: + expr: ((_col0 > '4') and (_col1 > 'val_4')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioned_query-subquery2:src TableScan alias: src @@ -70,35 +66,31 @@ STAGE PLANS: predicate: expr: (((key > '150') and (key > '4')) and (value > 'val_4')) type: boolean - Filter Operator - predicate: - expr: (key > '150') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Filter Operator - predicate: - expr: ((_col0 > '4') and (_col1 > 'val_4')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Filter Operator + predicate: + expr: ((_col0 > '4') and (_col1 > 'val_4')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/ppd_union_view.q.out ql/src/test/results/clientpositive/ppd_union_view.q.out index 6663943..d511ed5 100644 --- ql/src/test/results/clientpositive/ppd_union_view.q.out +++ ql/src/test/results/clientpositive/ppd_union_view.q.out @@ -389,32 +389,23 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false t1-subquery1:subq-subquery1:t1_new TableScan alias: t1_new @@ -443,32 +434,23 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -692,6 +674,37 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types string:string:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + 
GatherStats: false + MultiFileSpray: false + t1-subquery1:subq-subquery1:t1_new + TableScan + alias: t1_new + GatherStats: false + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Union Select Operator expressions: expr: _col0 @@ -718,55 +731,6 @@ STAGE PLANS: TotalFiles: 1 GatherStats: false MultiFileSpray: false - t1-subquery1:subq-subquery1:t1_new - TableScan - alias: t1_new - GatherStats: false - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - outputColumnNames: _col0, _col1, _col2 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types string:string:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out index 9fd6422..00807d3 100644 --- ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out +++ ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out @@ -1204,30 +1204,23 @@ STAGE PLANS: expressions: expr: l_shipdate type: string - expr: l_orderkey - type: int + expr: UDFToLong(l_orderkey) + type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToLong(_col1) - type: bigint - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -2938,19 +2931,12 @@ STAGE PLANS: expr: _col1 type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git 
ql/src/test/results/clientpositive/quote1.q.out ql/src/test/results/clientpositive/quote1.q.out index 5c78eff..51b47b3 100644 --- ql/src/test/results/clientpositive/quote1.q.out +++ ql/src/test/results/clientpositive/quote1.q.out @@ -37,26 +37,19 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/rcfile_merge1.q.out ql/src/test/results/clientpositive/rcfile_merge1.q.out index f8d1a25..385dde5 100644 --- ql/src/test/results/clientpositive/rcfile_merge1.q.out +++ ql/src/test/results/clientpositive/rcfile_merge1.q.out @@ -54,30 +54,21 @@ STAGE PLANS: alias: src Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: pmod(hash(key), 100) type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat - output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat - serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - name: default.rcfile_merge1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: default.rcfile_merge1 Stage: Stage-7 Conditional Operator @@ -675,30 +666,21 @@ STAGE PLANS: alias: src Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: pmod(hash(key), 100) type: int outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat - output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat - serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - name: default.rcfile_merge1b + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: default.rcfile_merge1b Stage: Stage-7 Conditional Operator diff --git 
ql/src/test/results/clientpositive/rcfile_merge2.q.out ql/src/test/results/clientpositive/rcfile_merge2.q.out index 24966ae..a108309 100644 --- ql/src/test/results/clientpositive/rcfile_merge2.q.out +++ ql/src/test/results/clientpositive/rcfile_merge2.q.out @@ -43,8 +43,8 @@ STAGE PLANS: alias: src Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: pmod(hash(key), 10) @@ -52,25 +52,14 @@ STAGE PLANS: expr: pmod(hash(value), 10) type: int outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: int - expr: _col3 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat - output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat - serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - name: default.rcfile_merge2a + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: default.rcfile_merge2a Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/rcfile_null_value.q.out ql/src/test/results/clientpositive/rcfile_null_value.q.out index bf0d79f..38a2f40 100644 --- ql/src/test/results/clientpositive/rcfile_null_value.q.out +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out @@ -166,45 +166,23 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat - output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat - serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - name: default.dest1_rc + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + name: default.dest1_rc Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/sample1.q.out ql/src/test/results/clientpositive/sample1.q.out index a88d2d4..113e641 100644 --- ql/src/test/results/clientpositive/sample1.q.out +++ ql/src/test/results/clientpositive/sample1.q.out @@ -43,8 +43,8 @@ STAGE PLANS: type: boolean Select Operator expressions: - expr: key - type: string + expr: UDFToInteger(key) + type: int expr: value type: string expr: ds @@ -52,41 +52,30 @@ STAGE PLANS: expr: hr type: string outputColumnNames: _col0, _col1, _col2, _col3 - 
Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,dt,hr - columns.types int:string:string:string -#### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,dt,hr + columns.types int:string:string:string +#### A masked pattern was here #### + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/sample7.q.out ql/src/test/results/clientpositive/sample7.q.out index 5770aad..e3cc5d0 100644 --- ql/src/test/results/clientpositive/sample7.q.out +++ ql/src/test/results/clientpositive/sample7.q.out @@ -39,44 +39,39 @@ STAGE PLANS: Filter Operator isSamplingPred: false predicate: - expr: (key > 100) + expr: ((((hash(key) & 2147483647) % 4) = 0) and (key > 100)) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string -#### A masked pattern was here #### - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here 
#### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string +#### A masked pattern was here #### + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/sample9.q.out ql/src/test/results/clientpositive/sample9.q.out index 86e75dd..7694961 100644 --- ql/src/test/results/clientpositive/sample9.q.out +++ ql/src/test/results/clientpositive/sample9.q.out @@ -33,30 +33,23 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 + File Output Operator + compressed: false + GlobalTableId: 0 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types int:string - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/select_as_omitted.q.out ql/src/test/results/clientpositive/select_as_omitted.q.out index d11ffeb..e25778c 100644 --- ql/src/test/results/clientpositive/select_as_omitted.q.out +++ ql/src/test/results/clientpositive/select_as_omitted.q.out @@ -33,24 +33,17 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + tag: -1 + value expressions: expr: _col0 type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Extract Limit diff --git ql/src/test/results/clientpositive/select_transform_hint.q.out ql/src/test/results/clientpositive/select_transform_hint.q.out index e676188..fef2f70 100644 --- ql/src/test/results/clientpositive/select_transform_hint.q.out +++ ql/src/test/results/clientpositive/select_transform_hint.q.out @@ -64,24 +64,17 @@ STAGE PLANS: expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - Transform Operator - command: cat - output info: + Transform 
Operator + command: cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Local Work: Map Reduce Local Work diff --git ql/src/test/results/clientpositive/semijoin.q.out ql/src/test/results/clientpositive/semijoin.q.out index e60513c..658e60c 100644 --- ql/src/test/results/clientpositive/semijoin.q.out +++ ql/src/test/results/clientpositive/semijoin.q.out @@ -784,27 +784,22 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -911,31 +906,24 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int expr: _col1 type: string + mode: hash outputColumnNames: _col0, _col1 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - expr: _col1 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -1039,27 +1027,22 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + Map-reduce partition columns: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -1197,20 +1180,15 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -1794,20 +1772,15 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Reduce Output 
Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int Local Work: Map Reduce Local Work Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/skewjoin.q.out ql/src/test/results/clientpositive/skewjoin.q.out index 6fadd44..c6c7590 100644 --- ql/src/test/results/clientpositive/skewjoin.q.out +++ ql/src/test/results/clientpositive/skewjoin.q.out @@ -111,26 +111,19 @@ STAGE PLANS: outputColumnNames: _col0, _col5 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Stage: Stage-5 Conditional Operator @@ -171,26 +164,19 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col5 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 Local Work: Map Reduce Local Work @@ -1191,7 +1177,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 100.0) and (key < 80.0)) + expr: ((key < 80.0) and (key < 100.0)) type: boolean Select Operator expressions: @@ -1241,7 +1227,7 @@ STAGE PLANS: alias: src Filter Operator predicate: - expr: ((key < 80.0) and (key < 100.0)) + expr: ((key < 100.0) and (key < 80.0)) type: boolean Select Operator expressions: @@ -1574,28 +1560,21 @@ STAGE PLANS: expr: _col5 type: string outputColumnNames: _col0, _col5 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col5 - Group By Operator - aggregations: - expr: sum(hash(_col0)) - expr: sum(hash(_col5)) - bucketGroup: false - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint - expr: _col1 - type: bigint + Group By Operator + aggregations: + expr: sum(hash(_col0)) + expr: sum(hash(_col5)) + bucketGroup: false + mode: hash + 
outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint + expr: _col1 + type: bigint Local Work: Map Reduce Local Work Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/smb_mapjoin9.q.out ql/src/test/results/clientpositive/smb_mapjoin9.q.out index af13e22..c91edf9 100644 --- ql/src/test/results/clientpositive/smb_mapjoin9.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out @@ -56,45 +56,32 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: int - expr: _col2 - type: string expr: _col5 type: int expr: _col6 type: string expr: _col7 type: string - outputColumnNames: _col0, _col2, _col5, _col6, _col7 - Select Operator - expressions: - expr: _col5 - type: int - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col0 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 + expr: _col0 + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types int:string:string:int - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types int:string:string:int + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Stage: Stage-0 @@ -164,45 +151,32 @@ STAGE PLANS: Position of Big Table: 1 Select Operator expressions: - expr: _col0 - type: int - expr: _col2 - type: string expr: _col5 type: int expr: _col6 type: string expr: _col7 type: string - outputColumnNames: _col0, _col2, _col5, _col6, _col7 - Select Operator - expressions: - expr: _col5 - type: int - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col0 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 + expr: _col0 + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 #### A masked pattern was here #### - NumFilesPerFileSink: 1 + NumFilesPerFileSink: 1 #### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types int:string:string:int - escape.delim \ - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types int:string:string:int + escape.delim \ + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Stage: Stage-0 @@ -305,35 +279,22 @@ STAGE PLANS: Position of Big Table: 0 Select Operator expressions: - expr: _col0 - type: int 
- expr: _col2 - type: string expr: _col5 type: int expr: _col6 type: string expr: _col7 type: string - outputColumnNames: _col0, _col2, _col5, _col6, _col7 - Select Operator - expressions: - expr: _col5 - type: int - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col0 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - name: default.smb_mapjoin9_results + expr: _col0 + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + name: default.smb_mapjoin9_results Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_1.q.out ql/src/test/results/clientpositive/smb_mapjoin_1.q.out index 3a01b04..2d430b4 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_1.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_1.q.out @@ -73,24 +73,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -149,24 +138,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -230,24 +208,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -310,24 +277,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -395,24 +351,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -471,24 +416,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -552,24 +486,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -632,24 +555,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: 
_col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_10.q.out ql/src/test/results/clientpositive/smb_mapjoin_10.q.out index 34937eb..f5fe945 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_10.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_10.q.out @@ -109,36 +109,13 @@ STAGE PLANS: type: string expr: _col11 type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col7, _col8, _col9, _col10, _col11 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: int - expr: _col3 - type: string - expr: _col4 - type: string - expr: _col7 - type: int - expr: _col8 - type: int - expr: _col9 - type: int - expr: _col10 - type: string - expr: _col11 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_11.q.out ql/src/test/results/clientpositive/smb_mapjoin_11.q.out index 6f6a6a9..309769e 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_11.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_11.q.out @@ -89,41 +89,34 @@ STAGE PLANS: type: int expr: _col6 type: string - outputColumnNames: _col0, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col6 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - Static Partition Specification: ds=1/ -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 16 - bucket_field_name key - columns key,value - columns.types int:string -#### A masked pattern was here #### - name default.test_table3 - partition_columns ds - serialization.ddl struct test_table3 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test_table3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Static Partition Specification: ds=1/ +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count 16 + bucket_field_name key + columns key,value + columns.types int:string +#### A masked pattern was here #### + name default.test_table3 + partition_columns ds + serialization.ddl struct test_table3 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test_table3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/smb_mapjoin_12.q.out ql/src/test/results/clientpositive/smb_mapjoin_12.q.out index 0764945..95e594c 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_12.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_12.q.out @@ -109,42 +109,35 @@ STAGE PLANS: type: int expr: _col6 type: string - outputColumnNames: _col0, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col6 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 - Static Partition Specification: ds=1/ -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - SORTBUCKETCOLSPREFIX TRUE - bucket_count 16 - bucket_field_name key - columns key,value - columns.types int:string -#### A masked pattern was here #### - name default.test_table3 - partition_columns ds - serialization.ddl struct test_table3 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test_table3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Static Partition Specification: ds=1/ +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + SORTBUCKETCOLSPREFIX TRUE + bucket_count 16 + bucket_field_name key + columns key,value + columns.types int:string +#### A masked pattern was here #### + name default.test_table3 + partition_columns ds + serialization.ddl struct test_table3 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test_table3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -339,51 +332,42 @@ STAGE PLANS: expressions: expr: _col0 type: int - expr: _col1 + expr: concat(_col1, _col6) type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: concat(_col1, _col6) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 
- Static Partition Specification: ds=2/ -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - SORTBUCKETCOLSPREFIX TRUE - bucket_count 16 - bucket_field_name key - columns key,value - columns.types int:string -#### A masked pattern was here #### - name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 - partition_columns ds - rawDataSize 32904 - serialization.ddl struct test_table3 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test_table3 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Static Partition Specification: ds=2/ +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + SORTBUCKETCOLSPREFIX TRUE + bucket_count 16 + bucket_field_name key + columns key,value + columns.types int:string +#### A masked pattern was here #### + name default.test_table3 + numFiles 16 + numPartitions 1 + numRows 3084 + partition_columns ds + rawDataSize 32904 + serialization.ddl struct test_table3 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 35988 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test_table3 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/smb_mapjoin_13.q.out ql/src/test/results/clientpositive/smb_mapjoin_13.q.out index 1e13994..1204f88 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_13.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_13.q.out @@ -109,33 +109,22 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string - expr: _col4 + expr: _col2 type: int - expr: _col5 + expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: int - expr: _col3 - type: string Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -319,33 +308,22 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string - expr: _col4 + expr: _col2 type: int - expr: _col5 + expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key 
expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: int - expr: _col3 - type: string Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/smb_mapjoin_14.q.out ql/src/test/results/clientpositive/smb_mapjoin_14.q.out index 56f3d96..234579a 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_14.q.out @@ -76,20 +76,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -189,37 +187,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Select Operator - expressions: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: - expr: _col0 - type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -361,37 +349,27 @@ STAGE PLANS: expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count() + bucketGroup: false + keys: expr: _col0 type: int - outputColumnNames: _col0 - Select Operator - expressions: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count() - bucketGroup: false - keys: - expr: _col0 - type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -403,19 +381,18 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: count() + 
bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -537,19 +514,18 @@ STAGE PLANS: 1 [Column[_col0]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -655,19 +631,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -794,36 +769,30 @@ STAGE PLANS: expr: key type: int outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: int - outputColumnNames: _col0 - Sorted Merge Bucket Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 - Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Sorted Merge Bucket Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 + Select Operator + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -952,19 +921,18 @@ STAGE PLANS: 1 [Column[_col0]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1091,19 +1059,18 @@ STAGE PLANS: 1 [Column[_col0]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: 
count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -1197,19 +1164,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1306,19 +1272,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1428,19 +1393,18 @@ STAGE PLANS: 2 [Column[_col0]] Position of Big Table: 2 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1554,20 +1518,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Select Operator - Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/smb_mapjoin_15.q.out ql/src/test/results/clientpositive/smb_mapjoin_15.q.out index f329a84..8990856 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_15.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_15.q.out @@ -83,33 +83,22 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string - expr: _col4 + expr: _col2 type: int - expr: _col5 + expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - 
type: int - expr: _col3 - type: string Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -339,41 +328,26 @@ STAGE PLANS: type: int expr: _col7 type: string - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: int expr: _col2 type: string - expr: _col5 + expr: _col3 type: int - expr: _col6 + expr: _col4 type: int - expr: _col7 + expr: _col5 type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: int - expr: _col4 - type: int - expr: _col5 - type: string Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -543,41 +517,26 @@ STAGE PLANS: type: int expr: _col7 type: string - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: int expr: _col2 type: string - expr: _col5 + expr: _col3 type: int - expr: _col6 + expr: _col4 type: int - expr: _col7 + expr: _col5 type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: int - expr: _col4 - type: int - expr: _col5 - type: string Needs Tagging: false Path -> Alias: #### A masked pattern was here #### @@ -777,41 +736,26 @@ STAGE PLANS: type: int expr: _col7 type: string - outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 - Select Operator - expressions: + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: int expr: _col2 type: string - expr: _col5 + expr: _col3 type: int - expr: _col6 + expr: _col4 type: int - expr: _col7 + expr: _col5 type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string - expr: _col3 - type: int - expr: _col4 - type: int - expr: _col5 - type: string Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/smb_mapjoin_16.q.out ql/src/test/results/clientpositive/smb_mapjoin_16.q.out index c4e633d..a1e9a3f 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_16.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_16.q.out @@ -66,19 +66,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + 
expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/smb_mapjoin_2.q.out ql/src/test/results/clientpositive/smb_mapjoin_2.q.out index 2a745d3..8f1ef15 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_2.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_2.q.out @@ -73,24 +73,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -151,24 +140,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -232,24 +210,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -314,24 +281,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -399,24 +355,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -477,24 +422,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -558,24 +492,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -640,24 +563,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_3.q.out ql/src/test/results/clientpositive/smb_mapjoin_3.q.out index 562fbe3..cc6733b 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_3.q.out +++ 
ql/src/test/results/clientpositive/smb_mapjoin_3.q.out @@ -73,24 +73,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -151,24 +140,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -231,24 +209,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -313,24 +280,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -397,24 +353,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File 
Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -475,24 +420,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -555,24 +489,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -637,24 +560,13 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_4.q.out ql/src/test/results/clientpositive/smb_mapjoin_4.q.out index 0d70fea..102972e 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_4.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_4.q.out @@ -80,28 +80,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - 
compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -169,28 +154,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -258,28 +228,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -352,28 +307,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -447,28 +387,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File 
Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -547,28 +472,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -638,28 +548,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -731,28 +626,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -826,28 +706,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -923,28 +788,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1014,28 +864,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1112,28 +947,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1207,28 +1027,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, 
_col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_5.q.out ql/src/test/results/clientpositive/smb_mapjoin_5.q.out index e42c6f1..6f1e6f2 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_5.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_5.q.out @@ -80,28 +80,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -169,28 +154,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -258,28 +228,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -352,28 +307,13 @@ STAGE PLANS: type: int expr: _col9 type: string - 
outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -447,28 +387,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -547,28 +472,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -638,28 +548,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -731,28 +626,13 @@ STAGE PLANS: type: int expr: _col9 
type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -826,28 +706,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -923,28 +788,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1014,28 +864,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1112,28 +947,13 @@ STAGE PLANS: type: 
int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1207,28 +1027,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_6.q.out ql/src/test/results/clientpositive/smb_mapjoin_6.q.out index c8474b3..0bd3a5d 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out @@ -98,26 +98,15 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-7 Conditional Operator @@ -1364,26 +1353,15 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-7 Conditional Operator @@ -2734,26 +2712,15 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-7 Conditional Operator @@ -2932,26 +2899,15 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-7 Conditional Operator @@ -3137,28 +3093,13 @@ STAGE PLANS: type: int expr: _col9 type: string - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/smb_mapjoin_7.q.out ql/src/test/results/clientpositive/smb_mapjoin_7.q.out index e33fe1f..0012c44 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_7.q.out +++ 
ql/src/test/results/clientpositive/smb_mapjoin_7.q.out @@ -665,26 +665,15 @@ STAGE PLANS: type: int expr: _col5 type: string - outputColumnNames: _col0, _col1, _col4, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_1.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_1.q.out index 8446f6f..4b39705 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_1.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_1.q.out @@ -85,23 +85,18 @@ STAGE PLANS: outputColumnNames: _col0 Position of Big Table: 0 Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_2.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_2.q.out index fe2d3fa..e67def0 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_2.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_2.q.out @@ -93,23 +93,18 @@ STAGE PLANS: outputColumnNames: _col0 Position of Big Table: 0 Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_3.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_3.q.out index 0ce3260..262c5bd 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_3.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_3.q.out @@ -93,23 +93,18 @@ STAGE PLANS: outputColumnNames: _col0 Position of Big Table: 0 Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - 
mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_4.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_4.q.out index b9aaa9e..9ad9d65 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_4.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_4.q.out @@ -116,23 +116,18 @@ STAGE PLANS: outputColumnNames: _col0 Position of Big Table: 0 Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Reduce Operator Tree: diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out index b0e4608..c390b5e 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out @@ -93,19 +93,18 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out index a0af00b..7dabb55 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out @@ -123,19 +123,18 @@ STAGE PLANS: Position of Big Table: 0 BucketMapJoin: true Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out index 82b7773..c321351 100644 --- ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out +++ ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out @@ -179,19 +179,18 @@ STAGE PLANS: 1 [Column[key]] Position of 
Big Table: 0 Select Operator - Select Operator - Group By Operator - aggregations: - expr: count() - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Group By Operator + aggregations: + expr: count() + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/split_sample.q.out ql/src/test/results/clientpositive/split_sample.q.out index 7a38661..bb00b90 100644 --- ql/src/test/results/clientpositive/split_sample.q.out +++ ql/src/test/results/clientpositive/split_sample.q.out @@ -4120,19 +4120,12 @@ STAGE PLANS: expr: _col4 type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/stats11.q.out ql/src/test/results/clientpositive/stats11.q.out index 006a43a..9c96e05 100644 --- ql/src/test/results/clientpositive/stats11.q.out +++ ql/src/test/results/clientpositive/stats11.q.out @@ -348,42 +348,31 @@ STAGE PLANS: type: string expr: _col5 type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col5, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -849,47 +838,36 @@ STAGE PLANS: type: string expr: _col5 type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col5, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 -#### A masked pattern was here #### - NumFilesPerFileSink: 1 -#### A masked pattern was here #### - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string -#### A masked pattern was here #### - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 -#### A masked pattern was here #### - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string +#### A masked pattern was here #### + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + rawDataSize 8519 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false diff --git ql/src/test/results/clientpositive/subq.q.out ql/src/test/results/clientpositive/subq.q.out index b727e89..9592148 100644 --- ql/src/test/results/clientpositive/subq.q.out +++ ql/src/test/results/clientpositive/subq.q.out @@ -40,19 +40,12 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator diff --git ql/src/test/results/clientpositive/subq2.q.out 
ql/src/test/results/clientpositive/subq2.q.out index bcc998d..7ccacf9 100644 --- ql/src/test/results/clientpositive/subq2.q.out +++ ql/src/test/results/clientpositive/subq2.q.out @@ -69,19 +69,12 @@ STAGE PLANS: expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/type_widening.q.out ql/src/test/results/clientpositive/type_widening.q.out index 336e7d3..2c85a1d 100644 --- ql/src/test/results/clientpositive/type_widening.q.out +++ ql/src/test/results/clientpositive/type_widening.q.out @@ -65,29 +65,24 @@ STAGE PLANS: alias: src Select Operator expressions: - expr: 0 - type: int + expr: UDFToLong(0) + type: bigint outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToLong(_col0) - type: bigint - outputColumnNames: _col0 - Union - Select Operator - expressions: + Union + Select Operator + expressions: + expr: _col0 + type: bigint + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: bigint + sort order: + + tag: -1 + value expressions: expr: _col0 type: bigint - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: bigint - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: bigint null-subquery2:a-subquery2:src TableScan alias: src diff --git ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out index 0612dd7..c91d594 100644 --- ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out +++ ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out @@ -159,19 +159,12 @@ STAGE PLANS: expr: value type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/union10.q.out ql/src/test/results/clientpositive/union10.q.out index 0ad364c..ad05b38 100644 --- ql/src/test/results/clientpositive/union10.q.out +++ ql/src/test/results/clientpositive/union10.q.out @@ -90,24 +90,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable #### A masked pattern was here #### TableScan Union @@ -115,24 +108,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable #### A masked pattern was here #### TableScan Union @@ -140,24 +126,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/union12.q.out ql/src/test/results/clientpositive/union12.q.out index b518407..4a1dea6 100644 --- ql/src/test/results/clientpositive/union12.q.out +++ ql/src/test/results/clientpositive/union12.q.out @@ -90,24 +90,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable #### A masked pattern was here #### TableScan Union @@ -115,24 +108,17 @@ 
STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable #### A masked pattern was here #### TableScan Union @@ -140,24 +126,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/union25.q.out ql/src/test/results/clientpositive/union25.q.out index 030a7c1..b0786e9 100644 --- ql/src/test/results/clientpositive/union25.q.out +++ ql/src/test/results/clientpositive/union25.q.out @@ -145,20 +145,15 @@ STAGE PLANS: expressions: expr: _col0 type: string - outputColumnNames: _col0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col0 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + expr: _col0 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce diff --git ql/src/test/results/clientpositive/union28.q.out ql/src/test/results/clientpositive/union28.q.out index a98ecee..32d1b33 100644 --- ql/src/test/results/clientpositive/union28.q.out +++ ql/src/test/results/clientpositive/union28.q.out @@ -158,26 +158,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union null-subquery1:a-subquery1:src TableScan alias: src @@ -191,26 +184,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/union29.q.out ql/src/test/results/clientpositive/union29.q.out index 907cf34..39b4140 100644 --- ql/src/test/results/clientpositive/union29.q.out +++ ql/src/test/results/clientpositive/union29.q.out @@ -59,26 +59,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union null-subquery2:a-subquery2-subquery1:subq-subquery1:src TableScan alias: src @@ -100,26 +93,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union null-subquery2:a-subquery2-subquery2:subq-subquery2:src TableScan alias: src @@ -141,26 +127,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union Stage: Stage-7 Conditional Operator diff --git ql/src/test/results/clientpositive/union30.q.out ql/src/test/results/clientpositive/union30.q.out index 3a77c0a..1e759b0 100644 --- ql/src/test/results/clientpositive/union30.q.out +++ ql/src/test/results/clientpositive/union30.q.out @@ -217,26 +217,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union null-subquery2:aa-subquery2:src TableScan alias: src @@ -250,26 +243,19 @@ STAGE PLANS: Union Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.union_subq_union + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.union_subq_union Stage: Stage-10 Conditional Operator diff --git ql/src/test/results/clientpositive/union31.q.out ql/src/test/results/clientpositive/union31.q.out index a872a57..2e271da 100644 --- ql/src/test/results/clientpositive/union31.q.out +++ ql/src/test/results/clientpositive/union31.q.out 
@@ -195,24 +195,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t3 Stage: Stage-0 Move Operator @@ -257,24 +250,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t4 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t4 Stage: Stage-1 Move Operator @@ -535,24 +521,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t5 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t5 Group By Operator aggregations: expr: sum(VALUE._col0) @@ -566,24 +545,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t6 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t6 Stage: Stage-0 Move Operator @@ -1004,24 +976,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t7 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t7 Group By Operator aggregations: expr: count(VALUE._col0) @@ -1035,24 +1000,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 2 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.t8 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.t8 Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/union32.q.out ql/src/test/results/clientpositive/union32.q.out index 29681f1..2f84314 100644 --- ql/src/test/results/clientpositive/union32.q.out +++ ql/src/test/results/clientpositive/union32.q.out @@ -73,29 +73,24 @@ STAGE PLANS: alias: t2 Select Operator expressions: - expr: UDFToLong(key) - type: bigint + expr: UDFToDouble(UDFToLong(key)) + type: double outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - outputColumnNames: _col0 - Union - Select Operator - expressions: + Union + Select Operator + expressions: + expr: _col0 + type: double + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: double + sort order: + + tag: -1 + value expressions: expr: _col0 type: double - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: double - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: double Reduce Operator Tree: Extract File Output Operator @@ -214,20 +209,15 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: UDFToLong(_col0) - type: bigint + expr: UDFToDouble(UDFToLong(_col0)) + type: double outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + 
GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -402,20 +392,15 @@ STAGE PLANS: outputColumnNames: _col0 Select Operator expressions: - expr: UDFToLong(_col0) - type: bigint + expr: UDFToDouble(UDFToLong(_col0)) + type: double outputColumnNames: _col0 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -593,24 +578,17 @@ STAGE PLANS: outputColumnNames: _col0, _col4 Select Operator expressions: - expr: UDFToLong(_col0) - type: bigint - expr: UDFToDouble(_col4) + expr: UDFToDouble(UDFToLong(_col0)) type: double + expr: UDFToString(UDFToDouble(_col4)) + type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - expr: UDFToString(_col1) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -798,24 +776,17 @@ STAGE PLANS: outputColumnNames: _col0, _col4 Select Operator expressions: - expr: UDFToLong(_col0) - type: bigint + expr: UDFToDouble(UDFToLong(_col0)) + type: double expr: UDFToDouble(_col4) type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToDouble(_col0) - type: double - expr: _col1 - type: double - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -848,35 +819,28 @@ STAGE PLANS: expressions: expr: UDFToDouble(key) type: double - expr: key - type: string + expr: UDFToDouble(key) + type: double outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: double - expr: UDFToDouble(_col1) - type: double - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: + Union + Select Operator + expressions: + expr: _col0 + type: double + expr: _col1 + type: double + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: double + sort order: + + tag: -1 + value expressions: expr: _col0 type: double expr: _col1 type: double - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: double - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: double - expr: 
_col1 - type: double Reduce Operator Tree: Extract File Output Operator diff --git ql/src/test/results/clientpositive/union33.q.out ql/src/test/results/clientpositive/union33.q.out index efd5902..94c82aa 100644 --- ql/src/test/results/clientpositive/union33.q.out +++ ql/src/test/results/clientpositive/union33.q.out @@ -122,22 +122,15 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToString(_col1) + type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToString(_col1) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -396,22 +389,15 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToString(_col1) + type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToString(_col1) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-3 Map Reduce diff --git ql/src/test/results/clientpositive/union4.q.out ql/src/test/results/clientpositive/union4.q.out index b9bae59..97d8223 100644 --- ql/src/test/results/clientpositive/union4.q.out +++ ql/src/test/results/clientpositive/union4.q.out @@ -87,24 +87,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable #### A masked pattern was here #### TableScan Union @@ -112,24 +105,17 @@ STAGE PLANS: expressions: expr: _col0 type: string - expr: _col1 - type: bigint + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable Stage: Stage-8 Conditional Operator diff --git ql/src/test/results/clientpositive/union_lateralview.q.out ql/src/test/results/clientpositive/union_lateralview.q.out index c8ab3fc..4397cc6 100644 --- ql/src/test/results/clientpositive/union_lateralview.q.out +++ ql/src/test/results/clientpositive/union_lateralview.q.out @@ -235,30 +235,21 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3 Select Operator expressions: - expr: _col3 - type: string + expr: UDFToInteger(_col3) + type: int expr: _col0 type: int expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: int - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.test_union_lateral_view + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.test_union_lateral_view Stage: Stage-0 Move Operator diff --git ql/src/test/results/clientpositive/union_view.q.out ql/src/test/results/clientpositive/union_view.q.out index dee11ec..45b170b 100644 --- ql/src/test/results/clientpositive/union_view.q.out +++ ql/src/test/results/clientpositive/union_view.q.out @@ -597,21 +597,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 TableScan alias: src_union_2 @@ -641,21 +632,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat src_union_view-subquery2:subq-subquery2:src_union_3 TableScan alias: src_union_3 @@ -685,21 +667,12 @@ STAGE PLANS: 
expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -781,21 +754,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 TableScan alias: src_union_2 @@ -825,21 +789,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat src_union_view-subquery2:subq-subquery2:src_union_3 TableScan alias: src_union_3 @@ -869,21 +824,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -965,21 +911,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 TableScan alias: src_union_2 @@ -1009,21 +946,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat src_union_view-subquery2:subq-subquery2:src_union_3 TableScan alias: src_union_3 @@ -1053,21 +981,12 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -1153,28 +1072,19 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col2 + type: string + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col2 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 TableScan alias: src_union_2 @@ -1204,28 +1114,19 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col2 + type: string + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col2 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string src_union_view-subquery2:subq-subquery2:src_union_3 TableScan alias: src_union_3 @@ -1255,28 +1156,19 @@ STAGE PLANS: expr: _col2 type: string outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: + Reduce Output Operator + key expressions: + expr: _col2 + type: string + sort order: + + tag: -1 + value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col2 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string Reduce Operator Tree: Extract File Output Operator @@ -1426,23 +1318,18 @@ STAGE PLANS: outputColumnNames: _col0, 
_col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 
           TableScan
             alias: src_union_2
@@ -1464,23 +1351,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery2:subq-subquery2:src_union_3 
           TableScan
             alias: src_union_3
@@ -1502,23 +1384,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
       Reduce Operator Tree:
        Group By Operator
          aggregations:
@@ -1606,23 +1483,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 
           TableScan
             alias: src_union_2
@@ -1640,23 +1512,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery2:subq-subquery2:src_union_3 
          TableScan
            alias: src_union_3
@@ -1678,23 +1545,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
       Reduce Operator Tree:
        Group By Operator
          aggregations:
@@ -1782,23 +1644,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 
           TableScan
             alias: src_union_2
@@ -1820,23 +1677,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery2:subq-subquery2:src_union_3 
           TableScan
            alias: src_union_3
@@ -1854,23 +1706,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
       Reduce Operator Tree:
        Group By Operator
          aggregations:
@@ -1973,21 +1820,12 @@ STAGE PLANS:
                       expr: _col2
                       type: string
                 outputColumnNames: _col0, _col1, _col2
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: int
-                        expr: _col1
-                        type: string
-                        expr: _col2
-                        type: string
-                  outputColumnNames: _col0, _col1, _col2
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
         src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 
           TableScan
             alias: src_union_2
@@ -2017,21 +1855,12 @@ STAGE PLANS:
                       expr: _col2
                       type: string
                 outputColumnNames: _col0, _col1, _col2
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: int
-                        expr: _col1
-                        type: string
-                        expr: _col2
-                        type: string
-                  outputColumnNames: _col0, _col1, _col2
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
         src_union_view-subquery2:subq-subquery2:src_union_3 
           TableScan
            alias: src_union_3
@@ -2061,21 +1890,12 @@ STAGE PLANS:
                       expr: _col2
                       type: string
                 outputColumnNames: _col0, _col1, _col2
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: int
-                        expr: _col1
-                        type: string
-                        expr: _col2
-                        type: string
-                  outputColumnNames: _col0, _col1, _col2
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   Stage: Stage-0
     Fetch Operator
@@ -2146,23 +1966,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery1-subquery2:subq-subquery1-subquery2:src_union_2 
           TableScan
             alias: src_union_2
@@ -2184,23 +1999,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
         src_union_view-subquery2:subq-subquery2:src_union_3 
           TableScan
            alias: src_union_3
@@ -2218,23 +2028,18 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2
                 Union
                   Select Operator
-                    expressions:
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col2
-                    Select Operator
-                      Group By Operator
-                        aggregations:
-                              expr: count(1)
-                        bucketGroup: false
-                        mode: hash
-                        outputColumnNames: _col0
-                        Reduce Output Operator
-                          sort order: 
-                          tag: -1
-                          value expressions:
-                                expr: _col0
-                                type: bigint
+                    Group By Operator
+                      aggregations:
+                            expr: count(1)
+                      bucketGroup: false
+                      mode: hash
+                      outputColumnNames: _col0
+                      Reduce Output Operator
+                        sort order: 
+                        tag: -1
+                        value expressions:
+                              expr: _col0
+                              type: bigint
       Reduce Operator Tree:
        Group By Operator
          aggregations:
diff --git ql/src/test/results/compiler/plan/join4.q.xml ql/src/test/results/compiler/plan/join4.q.xml
index 71f8460..42c05e2 100644
--- ql/src/test/results/compiler/plan/join4.q.xml
+++ 
ql/src/test/results/compiler/plan/join4.q.xml @@ -1685,255 +1685,77 @@ - - - - - - - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - escape.delim - \ - - - - - - - 1 - - - - - - - CNTR_NAME_FS_11_NUM_INPUT_ROWS - - - CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_11_TIME_TAKEN - - - CNTR_NAME_FS_11_FATAL_ERROR - - - - - FS_11 - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - - + + + + + #### A masked pattern was here #### - - - - - - _col3 - - - _col3 - - - - - + + 1 - - _col2 - - - _col2 - - - - - + + #### A masked pattern was here #### - - _col1 - - - _col1 - - - - - + + true - - _col0 - - - _col0 - - - - - - - - - - - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - + + + + columns + _col0,_col1,_col2,_col3 + + + serialization.format + 1 + + + columns.types + string:string:string:string + + + escape.delim + \ + + - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - + + 1 - CNTR_NAME_SEL_10_NUM_INPUT_ROWS + CNTR_NAME_FS_11_NUM_INPUT_ROWS - CNTR_NAME_SEL_10_NUM_OUTPUT_ROWS + CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_10_TIME_TAKEN + CNTR_NAME_FS_11_TIME_TAKEN - CNTR_NAME_SEL_10_FATAL_ERROR + CNTR_NAME_FS_11_FATAL_ERROR - SEL_10 + FS_11 @@ -1948,14 +1770,11 @@ - - c1 - _col0 - c + @@ -1964,14 +1783,11 @@ - - c2 - _col1 - c + @@ -1980,14 +1796,11 @@ - - c3 - _col2 - c + @@ -1996,14 +1809,11 @@ - - c4 - _col3 - c + @@ -2022,7 +1832,7 @@ _col3 - + _col3 @@ -2036,7 +1846,7 @@ _col2 - + _col2 @@ -2050,7 +1860,7 @@ _col1 - + _col1 @@ -2064,7 +1874,7 @@ _col0 - + _col0 @@ -2083,16 +1893,16 @@ - + - + - + - + @@ -2146,9 +1956,15 @@ + + c1 + _col0 + + c + @@ -2156,9 +1972,15 @@ + + c2 + _col1 + + c + @@ -2166,9 +1988,15 @@ + + c3 + _col2 + + c + @@ -2176,9 +2004,15 @@ + + c4 + _col3 + + c + @@ -2196,7 +2030,7 @@ _col3 - + VALUE._col1 @@ -2210,7 +2044,7 @@ _col2 - + VALUE._col0 @@ -2224,7 +2058,7 @@ _col1 - + VALUE._col1 @@ -2238,7 +2072,7 @@ _col0 - + VALUE._col0 @@ -2274,10 +2108,10 @@ 0 - + - + @@ -2285,10 +2119,10 @@ 1 - + - + diff --git ql/src/test/results/compiler/plan/join5.q.xml ql/src/test/results/compiler/plan/join5.q.xml index 1907dd8..ecaaaff 100644 --- ql/src/test/results/compiler/plan/join5.q.xml +++ ql/src/test/results/compiler/plan/join5.q.xml @@ -1685,255 +1685,77 @@ - - - - - - - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - escape.delim - \ - - - - - - - 1 - - - - - - - CNTR_NAME_FS_11_NUM_INPUT_ROWS - - - CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_11_TIME_TAKEN - - - CNTR_NAME_FS_11_FATAL_ERROR - - - - - FS_11 - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - 
- - - - - - - - - _col3 - - - - - - - - - - - - - - + + + + + #### A masked pattern was here #### - - - - - - _col3 - - - _col3 - - - - - + + 1 - - _col2 - - - _col2 - - - - - + + #### A masked pattern was here #### - - _col1 - - - _col1 - - - - - + + true - - _col0 - - - _col0 - - - - - - - - - - - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - + + + + columns + _col0,_col1,_col2,_col3 + + + serialization.format + 1 + + + columns.types + string:string:string:string + + + escape.delim + \ + + - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - + + 1 - CNTR_NAME_SEL_10_NUM_INPUT_ROWS + CNTR_NAME_FS_11_NUM_INPUT_ROWS - CNTR_NAME_SEL_10_NUM_OUTPUT_ROWS + CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_10_TIME_TAKEN + CNTR_NAME_FS_11_TIME_TAKEN - CNTR_NAME_SEL_10_FATAL_ERROR + CNTR_NAME_FS_11_FATAL_ERROR - SEL_10 + FS_11 @@ -1948,14 +1770,11 @@ - - c1 - _col0 - c + @@ -1964,14 +1783,11 @@ - - c2 - _col1 - c + @@ -1980,14 +1796,11 @@ - - c3 - _col2 - c + @@ -1996,14 +1809,11 @@ - - c4 - _col3 - c + @@ -2022,7 +1832,7 @@ _col3 - + _col3 @@ -2036,7 +1846,7 @@ _col2 - + _col2 @@ -2050,7 +1860,7 @@ _col1 - + _col1 @@ -2064,7 +1874,7 @@ _col0 - + _col0 @@ -2083,16 +1893,16 @@ - + - + - + - + @@ -2146,9 +1956,15 @@ + + c1 + _col0 + + c + @@ -2156,9 +1972,15 @@ + + c2 + _col1 + + c + @@ -2166,9 +1988,15 @@ + + c3 + _col2 + + c + @@ -2176,9 +2004,15 @@ + + c4 + _col3 + + c + @@ -2196,7 +2030,7 @@ _col3 - + VALUE._col1 @@ -2210,7 +2044,7 @@ _col2 - + VALUE._col0 @@ -2224,7 +2058,7 @@ _col1 - + VALUE._col1 @@ -2238,7 +2072,7 @@ _col0 - + VALUE._col0 @@ -2274,10 +2108,10 @@ 0 - + - + @@ -2285,10 +2119,10 @@ 1 - + - + diff --git ql/src/test/results/compiler/plan/join6.q.xml ql/src/test/results/compiler/plan/join6.q.xml index 85d06d3..f00ce9e 100644 --- ql/src/test/results/compiler/plan/join6.q.xml +++ ql/src/test/results/compiler/plan/join6.q.xml @@ -1685,255 +1685,77 @@ - - - - - - - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - escape.delim - \ - - - - - - - 1 - - - - - - - CNTR_NAME_FS_11_NUM_INPUT_ROWS - - - CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_11_TIME_TAKEN - - - CNTR_NAME_FS_11_FATAL_ERROR - - - - - FS_11 - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - - + + + + + #### A masked pattern was here #### - - - - - - _col3 - - - _col3 - - - - - + + 1 - - _col2 - - - _col2 - - - - - + + #### A masked pattern was here #### - - _col1 - - - _col1 - - - - - + + true - - _col0 - - - _col0 - - - - - - - - - - - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - + + + + columns + _col0,_col1,_col2,_col3 + + + serialization.format + 1 + + + columns.types + string:string:string:string + + + escape.delim + \ + + - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - + + 1 - CNTR_NAME_SEL_10_NUM_INPUT_ROWS + CNTR_NAME_FS_11_NUM_INPUT_ROWS - CNTR_NAME_SEL_10_NUM_OUTPUT_ROWS + 
CNTR_NAME_FS_11_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_10_TIME_TAKEN + CNTR_NAME_FS_11_TIME_TAKEN - CNTR_NAME_SEL_10_FATAL_ERROR + CNTR_NAME_FS_11_FATAL_ERROR - SEL_10 + FS_11 @@ -1948,14 +1770,11 @@ - - c1 - _col0 - c + @@ -1964,14 +1783,11 @@ - - c2 - _col1 - c + @@ -1980,14 +1796,11 @@ - - c3 - _col2 - c + @@ -1996,14 +1809,11 @@ - - c4 - _col3 - c + @@ -2022,7 +1832,7 @@ _col3 - + _col3 @@ -2036,7 +1846,7 @@ _col2 - + _col2 @@ -2050,7 +1860,7 @@ _col1 - + _col1 @@ -2064,7 +1874,7 @@ _col0 - + _col0 @@ -2083,16 +1893,16 @@ - + - + - + - + @@ -2146,9 +1956,15 @@ + + c1 + _col0 + + c + @@ -2156,9 +1972,15 @@ + + c2 + _col1 + + c + @@ -2166,9 +1988,15 @@ + + c3 + _col2 + + c + @@ -2176,9 +2004,15 @@ + + c4 + _col3 + + c + @@ -2196,7 +2030,7 @@ _col3 - + VALUE._col1 @@ -2210,7 +2044,7 @@ _col2 - + VALUE._col0 @@ -2224,7 +2058,7 @@ _col1 - + VALUE._col1 @@ -2238,7 +2072,7 @@ _col0 - + VALUE._col0 @@ -2274,10 +2108,10 @@ 0 - + - + @@ -2285,10 +2119,10 @@ 1 - + - + diff --git ql/src/test/results/compiler/plan/join7.q.xml ql/src/test/results/compiler/plan/join7.q.xml index 7692426..8f2cc36 100644 --- ql/src/test/results/compiler/plan/join7.q.xml +++ ql/src/test/results/compiler/plan/join7.q.xml @@ -2409,315 +2409,77 @@ - - - - - - - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3,_col4,_col5 - - - serialization.format - 1 - - - columns.types - string:string:string:string:string:string - - - escape.delim - \ - - - - - - - 1 - - - - - - - CNTR_NAME_FS_15_NUM_INPUT_ROWS - - - CNTR_NAME_FS_15_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_15_TIME_TAKEN - - - CNTR_NAME_FS_15_FATAL_ERROR - - - - - FS_15 - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - _col4 - - - - - - - - - - - - - _col5 - - - - - - - - - - - - - - - - - - - - - _col5 - - - _col5 - - - - - - - - _col4 - - - _col4 - - - - - + + + + + #### A masked pattern was here #### - - _col3 - - - _col3 - - - - - + + 1 - - _col2 - - - _col2 - - - - - + + #### A masked pattern was here #### - - _col1 - - - _col1 - - - - - + + true - - _col0 - - - _col0 + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - + + org.apache.hadoop.mapred.TextInputFormat - - - - - - - - - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - - - - - - - - - - + + + + columns + _col0,_col1,_col2,_col3,_col4,_col5 + + + serialization.format + 1 + + + columns.types + string:string:string:string:string:string + + + escape.delim + \ + + - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - _col4 - - - _col5 - - + + 1 - CNTR_NAME_SEL_14_NUM_INPUT_ROWS + CNTR_NAME_FS_15_NUM_INPUT_ROWS - CNTR_NAME_SEL_14_NUM_OUTPUT_ROWS + CNTR_NAME_FS_15_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_14_TIME_TAKEN + CNTR_NAME_FS_15_TIME_TAKEN - CNTR_NAME_SEL_14_FATAL_ERROR + CNTR_NAME_FS_15_FATAL_ERROR - SEL_14 + FS_15 @@ -2732,14 +2494,11 @@ - - c1 - _col0 - c + @@ -2748,14 +2507,11 @@ - - c2 - _col1 - c + @@ -2764,14 +2520,11 @@ - - c3 - _col2 - c + @@ -2780,14 +2533,11 @@ - - c4 - _col3 - c + @@ -2796,14 +2546,11 @@ - - c5 - _col4 - c + @@ -2812,14 +2559,11 @@ - - c6 - _col5 - c + @@ -2838,7 +2582,7 @@ _col5 - + _col5 @@ -2852,7 +2596,7 @@ _col4 - + _col4 @@ -2866,7 +2610,7 @@ _col3 - + _col3 @@ -2880,7 +2624,7 @@ 
_col2 - + _col2 @@ -2894,7 +2638,7 @@ _col1 - + _col1 @@ -2908,7 +2652,7 @@ _col0 - + _col0 @@ -2927,22 +2671,22 @@ - + - + - + - + - + - + @@ -3002,9 +2746,15 @@ + + c1 + _col0 + + c + @@ -3012,9 +2762,15 @@ + + c2 + _col1 + + c + @@ -3022,9 +2778,15 @@ + + c3 + _col2 + + c + @@ -3032,9 +2794,15 @@ + + c4 + _col3 + + c + @@ -3042,9 +2810,15 @@ + + c5 + _col4 + + c + @@ -3052,9 +2826,15 @@ + + c6 + _col5 + + c + @@ -3072,7 +2852,7 @@ _col5 - + VALUE._col1 @@ -3086,7 +2866,7 @@ _col4 - + VALUE._col0 @@ -3100,7 +2880,7 @@ _col3 - + VALUE._col1 @@ -3114,7 +2894,7 @@ _col2 - + VALUE._col0 @@ -3128,7 +2908,7 @@ _col1 - + VALUE._col1 @@ -3142,7 +2922,7 @@ _col0 - + VALUE._col0 @@ -3188,10 +2968,10 @@ 0 - + - + @@ -3199,10 +2979,10 @@ 1 - + - + @@ -3210,10 +2990,10 @@ 2 - + - + diff --git ql/src/test/results/compiler/plan/join8.q.xml ql/src/test/results/compiler/plan/join8.q.xml index 36ab37d..d4a3775 100644 --- ql/src/test/results/compiler/plan/join8.q.xml +++ ql/src/test/results/compiler/plan/join8.q.xml @@ -1771,255 +1771,77 @@ - - - - - - - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - escape.delim - \ - - - - - - - 1 - - - - - - - CNTR_NAME_FS_12_NUM_INPUT_ROWS - - - CNTR_NAME_FS_12_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_12_TIME_TAKEN - - - CNTR_NAME_FS_12_FATAL_ERROR - - - - - FS_12 - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - - + + + + + #### A masked pattern was here #### - - - - - - _col3 - - - _col3 - - - - - + + 1 - - _col2 - - - _col2 - - - - - + + #### A masked pattern was here #### - - _col1 - - - _col1 - - - - - + + true - - _col0 - - - _col0 - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - - - - - - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - + + + + columns + _col0,_col1,_col2,_col3 + + + serialization.format + 1 + + + columns.types + string:string:string:string + + + escape.delim + \ + + - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - + + 1 - CNTR_NAME_SEL_11_NUM_INPUT_ROWS + CNTR_NAME_FS_12_NUM_INPUT_ROWS - CNTR_NAME_SEL_11_NUM_OUTPUT_ROWS + CNTR_NAME_FS_12_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_11_TIME_TAKEN + CNTR_NAME_FS_12_TIME_TAKEN - CNTR_NAME_SEL_11_FATAL_ERROR + CNTR_NAME_FS_12_FATAL_ERROR - SEL_11 + FS_12 @@ -2034,14 +1856,11 @@ - - c1 - _col0 - c + @@ -2050,14 +1869,11 @@ - - c2 - _col1 - c + @@ -2066,14 +1882,11 @@ - - c3 - _col2 - c + @@ -2082,14 +1895,11 @@ - - c4 - _col3 - c + @@ -2108,7 +1918,7 @@ _col3 - + _col3 @@ -2122,7 +1932,7 @@ _col2 - + _col2 @@ -2136,7 +1946,7 @@ _col1 - + _col1 @@ -2150,7 +1960,7 @@ _col0 - + _col0 @@ -2169,16 +1979,16 @@ - + - + - + - + @@ -2232,9 +2042,15 @@ + + c1 + _col0 + + c + @@ -2242,9 +2058,15 @@ + + c2 + _col1 + + c + @@ -2252,9 +2074,15 @@ + + c3 + _col2 + + c + @@ -2262,9 +2090,15 @@ + + c4 + _col3 + + c + @@ -2285,7 +2119,7 @@ - + @@ -2393,7 +2227,7 @@ _col3 - + VALUE._col1 @@ -2407,7 +2241,7 @@ _col2 - + VALUE._col0 @@ -2421,7 +2255,7 @@ _col1 - + VALUE._col1 @@ -2435,7 +2269,7 @@ _col0 - + VALUE._col0 @@ -2471,10 +2305,10 @@ 0 - + - + @@ -2482,10 +2316,10 @@ 1 
- + - + diff --git ql/src/test/results/compiler/plan/sample7.q.xml ql/src/test/results/compiler/plan/sample7.q.xml index 1ea3daa..84aba82 100644 --- ql/src/test/results/compiler/plan/sample7.q.xml +++ ql/src/test/results/compiler/plan/sample7.q.xml @@ -701,202 +701,107 @@ - + - - - - - - - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - - 200 - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - - 1 - - - - - - - CNTR_NAME_FS_4_NUM_INPUT_ROWS - - - CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_4_TIME_TAKEN - - - CNTR_NAME_FS_4_FATAL_ERROR - - - - - FS_4 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + #### A masked pattern was here #### - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + - - - - _col0 - - - _col1 - - + + 200 + + + 1 - + + #### A masked pattern was here #### + + true + + + + + 1 + - CNTR_NAME_SEL_3_NUM_INPUT_ROWS + CNTR_NAME_FS_4_NUM_INPUT_ROWS - CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS + CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_3_TIME_TAKEN + CNTR_NAME_FS_4_TIME_TAKEN - CNTR_NAME_SEL_3_FATAL_ERROR + CNTR_NAME_FS_4_FATAL_ERROR - SEL_3 + FS_4 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - + + + + + + + + + + _col1 + + + value + + + s + + + + + + + + _col0 + + + key + + + s + + + + + int @@ -905,11 +810,103 @@ - - + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + true - + + + + + + CNTR_NAME_SEL_3_NUM_INPUT_ROWS + + + CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS + + + CNTR_NAME_SEL_3_TIME_TAKEN + + + CNTR_NAME_SEL_3_FATAL_ERROR + + + + + SEL_3 + + + + + + + + + + + + + + + + _col0 + + + s + + + + + + + + + + _col1 + + + s + + + + + + + + + + + + + + + + + + + + + @@ -1038,108 +1035,47 @@ - - - - - - - - - CNTR_NAME_FIL_1_NUM_INPUT_ROWS - - - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS - - - CNTR_NAME_FIL_1_TIME_TAKEN - - - CNTR_NAME_FIL_1_FATAL_ERROR - - - - - FIL_1 - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - value - - - s + + + + + + + key + + + s + + + + + - - + + + + + + + 100 + + - - - - - - - - - - - - - - - - - - key - - - s - - - + + - - - - - - - - 100 + - + @@ -1179,10 +1115,30 @@ - + + + key + + + s + + + + + - + + + value + + + s + + + + + diff --git ql/src/test/results/compiler/plan/subq.q.xml ql/src/test/results/compiler/plan/subq.q.xml index cdbd713..30e8a0d 100644 --- ql/src/test/results/compiler/plan/subq.q.xml +++ ql/src/test/results/compiler/plan/subq.q.xml @@ -570,146 +570,50 @@ - - - - - - - - - 1 - - - #### A masked pattern was here #### - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - - 1 - - - - - - - CNTR_NAME_FS_4_NUM_INPUT_ROWS - - - CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_4_TIME_TAKEN - - - CNTR_NAME_FS_4_FATAL_ERROR - - - - - FS_4 - - - - - - - - - - - - - - - - - - - _col1 - - - _col1 - - - src - - - - - + + + + + 1 - - _col0 - - - _col0 - - - src - - - - - + + #### A masked pattern was here #### - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + #### A masked pattern was here #### - + true + + + + + 1 + - CNTR_NAME_SEL_3_NUM_INPUT_ROWS + CNTR_NAME_FS_4_NUM_INPUT_ROWS - CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS + CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_3_TIME_TAKEN + CNTR_NAME_FS_4_TIME_TAKEN - CNTR_NAME_SEL_3_FATAL_ERROR + CNTR_NAME_FS_4_FATAL_ERROR - SEL_3 + FS_4 @@ -719,44 +623,7 @@ - - - - - - - key - - - _col0 - - - src - - - - - - - - - - value - - - _col1 - - - src - - - - - - - - - + @@ -766,7 +633,7 @@ _col1 - + value @@ -780,7 +647,7 @@ _col0 - + key 
@@ -799,10 +666,10 @@ - + - + @@ -853,6 +720,9 @@ + + key + _col0 @@ -866,6 +736,9 @@ + + value + _col1