Index: eclipse-templates/TestMTQueries.launchtemplate =================================================================== --- eclipse-templates/TestMTQueries.launchtemplate (revision 0) +++ eclipse-templates/TestMTQueries.launchtemplate (revision 0) @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + Index: eclipse-templates/TestTruncate.launchtemplate =================================================================== --- eclipse-templates/TestTruncate.launchtemplate (revision 0) +++ eclipse-templates/TestTruncate.launchtemplate (revision 0) @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + Index: build-common.xml =================================================================== --- build-common.xml (revision 736746) +++ build-common.xml (working copy) @@ -241,6 +241,7 @@ --> + Index: ql/src/test/results/clientpositive/cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/cast1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/cast1.q.out (working copy) @@ -16,7 +16,7 @@ type: string Filter Operator predicate: - expr: (0 = 86) + expr: (UDFToDouble(0) = UDFToDouble(86)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/quote1.q.out =================================================================== --- ql/src/test/results/clientpositive/quote1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/quote1.q.out (working copy) @@ -12,7 +12,7 @@ src Filter Operator predicate: - expr: ((key >= 200) and (key < 300)) + expr: ((UDFToDouble(key) >= UDFToDouble(200)) and (UDFToDouble(key) < UDFToDouble(300))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/notable_alias2.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 736746) +++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy) @@ -17,7 +17,7 @@ type: string Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: @@ -49,7 +49,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/55994326/602553370.10001 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/903914190/338096343.10001 Reduce Output Operator key expressions: expr: 0 Index: ql/src/test/results/clientpositive/join6.q.out =================================================================== --- ql/src/test/results/clientpositive/join6.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join6.q.out (working copy) @@ -12,7 +12,7 @@ c:b:src2 Filter Operator predicate: - expr: ((key > 15) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: @@ -37,7 +37,7 @@ c:a:src1 Filter Operator predicate: - expr: ((key > 10) and (key < 20)) + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input7.q.out =================================================================== --- ql/src/test/results/clientpositive/input7.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input7.q.out (working copy) @@ -22,7 +22,7 @@ type: string Select Operator expressions: - expr: UDFToDouble(UDFToBoolean(0)) + expr: UDFToDouble(0) type: double expr: UDFToInteger(1) type: int 
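Note: the pattern running through the .q.out updates above and below is that a comparison mixing a string operand with a numeric literal is no longer compared directly; the planner now wraps both sides in a conversion to double, so a predicate like (key < 100) becomes (UDFToDouble(key) < UDFToDouble(100)). The following standalone Java sketch illustrates that rewrite rule; it is a hedged illustration only, and the Expr, toDouble, and comparison names are invented for this example rather than taken from Hive's semantic analyzer.

import java.util.Arrays;
import java.util.List;

public class ImplicitCastSketch {

    // Minimal stand-in for an expression node in a query plan.
    record Expr(String text, String type) {}

    // Wrap an expression in a (hypothetical) UDFToDouble call node.
    static Expr toDouble(Expr e) {
        if (e.type().equals("double")) {
            return e; // already a double, no conversion needed
        }
        return new Expr("UDFToDouble(" + e.text() + ")", "double");
    }

    // Build a comparison predicate, inserting implicit double casts on both
    // sides when a string operand meets a numeric one, as the new plans show.
    static Expr comparison(Expr left, String op, Expr right) {
        List<String> numeric = Arrays.asList("tinyint", "int", "bigint", "double");
        boolean mixed =
                (left.type().equals("string") && numeric.contains(right.type()))
             || (right.type().equals("string") && numeric.contains(left.type()));
        if (mixed) {
            left = toDouble(left);
            right = toDouble(right);
        }
        return new Expr("(" + left.text() + " " + op + " " + right.text() + ")", "boolean");
    }

    public static void main(String[] args) {
        Expr key = new Expr("key", "string"); // src.key is a string column
        Expr lit = new Expr("100", "int");    // integer literal
        // Prints: (UDFToDouble(key) < UDFToDouble(100))
        System.out.println(comparison(key, "<", lit).text());
    }
}

Applied to the plans in this patch, the same rule explains every predicate change, from (key = 10) through (0 >= 90), including both column-vs-literal and position-vs-literal forms.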
Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -16,7 +16,7 @@ type: string Filter Operator predicate: - expr: (0 = 86) + expr: (UDFToDouble(0) = UDFToDouble(86)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join12.q.out =================================================================== --- ql/src/test/results/clientpositive/join12.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join12.q.out (working copy) @@ -39,7 +39,7 @@ type: string Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: @@ -64,7 +64,7 @@ type: string Filter Operator predicate: - expr: (0 < 80) + expr: (UDFToDouble(0) < UDFToDouble(80)) type: boolean Reduce Output Operator key expressions: Index: ql/src/test/results/clientpositive/input13.q.out =================================================================== --- ql/src/test/results/clientpositive/input13.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input13.q.out (working copy) @@ -12,7 +12,7 @@ src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -35,7 +35,7 @@ name: dest1 Filter Operator predicate: - expr: ((key >= 100) and (key < 200)) + expr: ((UDFToDouble(key) >= UDFToDouble(100)) and (UDFToDouble(key) < UDFToDouble(200))) type: boolean Select Operator expressions: @@ -58,7 +58,7 @@ name: dest2 Filter Operator predicate: - expr: ((key >= 200) and (key < 300)) + expr: ((UDFToDouble(key) >= UDFToDouble(200)) and (UDFToDouble(key) < UDFToDouble(300))) type: boolean Select Operator expressions: @@ -77,7 +77,7 @@ name: dest3 Filter Operator predicate: - expr: (key >= 300) + expr: (UDFToDouble(key) >= UDFToDouble(300)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join16.q.out =================================================================== --- ql/src/test/results/clientpositive/join16.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join16.q.out (working copy) @@ -12,7 +12,7 @@ subq:a Filter Operator predicate: - expr: (key > 10) + expr: (UDFToDouble(key) > UDFToDouble(10)) type: boolean Select Operator expressions: @@ -22,7 +22,7 @@ type: string Filter Operator predicate: - expr: (0 > 20) + expr: (UDFToDouble(0) > UDFToDouble(20)) type: boolean Reduce Output Operator key expressions: @@ -70,7 +70,7 @@ 1 {VALUE.0} {VALUE.1} Filter Operator predicate: - expr: (3 < 200) + expr: (UDFToDouble(3) < UDFToDouble(200)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input11_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input11_limit.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy) @@ -12,7 +12,7 @@ src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/notable_alias1.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/notable_alias1.q.out (working 
copy) @@ -17,7 +17,7 @@ type: string Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: @@ -49,7 +49,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/10020404/161743246.10001 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/562761632/249466954.10001 Reduce Output Operator key expressions: expr: 0 Index: ql/src/test/results/clientpositive/join5.q.out =================================================================== --- ql/src/test/results/clientpositive/join5.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join5.q.out (working copy) @@ -12,7 +12,7 @@ c:b:src2 Filter Operator predicate: - expr: ((key > 15) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: @@ -37,7 +37,7 @@ c:a:src1 Filter Operator predicate: - expr: ((key > 10) and (key < 20)) + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join11.q.out =================================================================== --- ql/src/test/results/clientpositive/join11.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join11.q.out (working copy) @@ -39,7 +39,7 @@ type: string Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: Index: ql/src/test/results/clientpositive/input12.q.out =================================================================== --- ql/src/test/results/clientpositive/input12.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input12.q.out (working copy) @@ -12,7 +12,7 @@ src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -35,7 +35,7 @@ name: dest1 Filter Operator predicate: - expr: ((key >= 100) and (key < 200)) + expr: ((UDFToDouble(key) >= UDFToDouble(100)) and (UDFToDouble(key) < UDFToDouble(200))) type: boolean Select Operator expressions: @@ -58,7 +58,7 @@ name: dest2 Filter Operator predicate: - expr: (key >= 200) + expr: (UDFToDouble(key) >= UDFToDouble(200)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input1_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input1_limit.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input1_limit.q.out (working copy) @@ -14,7 +14,7 @@ src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -33,7 +33,7 @@ type: string Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -85,7 +85,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/196681773/625336699.10002 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/2110365113/704872924.10002 Reduce Output Operator sort order: tag: -1 Index: ql/src/test/results/clientpositive/groupby3_map.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3_map.q.out (revision 736746) +++ ql/src/test/results/clientpositive/groupby3_map.q.out (working copy) @@ -20,8 +20,8 @@ expr: avg(DISTINCT 
UDFToDouble(substr(0, 4))) expr: sum(UDFToDouble(substr(0, 4))) expr: avg(UDFToDouble(substr(0, 4))) - expr: min(UDFToDouble(substr(0, 4))) - expr: max(UDFToDouble(substr(0, 4))) + expr: min(substr(0, 4)) + expr: max(substr(0, 4)) keys: expr: substr(0, 4) type: string @@ -43,15 +43,15 @@ expr: 3 type: string expr: 4 - type: double + type: string expr: 5 - type: double + type: string Reduce Operator Tree: Group By Operator aggregations: expr: avg(DISTINCT UDFToDouble(KEY.0)) expr: sum(VALUE.1) - expr: avg(VALUE.2) + expr: avg(UDFToDouble(VALUE.2)) expr: min(VALUE.3) expr: max(VALUE.4) mode: partial2 @@ -65,7 +65,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/151053238/135596442.10001 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/885399493/544614010.10001 Reduce Output Operator sort order: tag: -1 @@ -77,15 +77,15 @@ expr: 2 type: string expr: 3 - type: double + type: string expr: 4 - type: double + type: string Reduce Operator Tree: Group By Operator aggregations: - expr: avg(VALUE.0) + expr: avg(UDFToDouble(VALUE.0)) expr: sum(VALUE.1) - expr: avg(VALUE.2) + expr: avg(UDFToDouble(VALUE.2)) expr: min(VALUE.3) expr: max(VALUE.4) mode: final @@ -98,16 +98,28 @@ expr: 0 type: double expr: 4 - type: double + type: string expr: 3 - type: double - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - name: dest1 + type: string + Select Operator + expressions: + expr: 0 + type: double + expr: 1 + type: double + expr: 2 + type: double + expr: UDFToDouble(3) + type: double + expr: UDFToDouble(4) + type: double + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + name: dest1 Stage: Stage-0 Move Operator @@ -120,4 +132,4 @@ name: dest1 -130091.0 260.182 256.10355987055016 498.0 0.0 +130091.0 NULL NULL 98.0 98.0 Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 736746) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -16,7 +16,7 @@ type: boolean Filter Operator predicate: - expr: (key > 100) + expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean Select Operator expressions: @@ -32,7 +32,7 @@ type: string File Output Operator compressed: false - directory: /tmp/hive-zshao/444520495/211065154.10000.insclause-0 + directory: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/390012861/922014023.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -45,14 +45,14 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 Needs Tagging: false Path -> Alias: - 
file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcbucket/kv1.txt + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcbucket/kv1.txt Path -> Partition: - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcbucket/kv1.txt + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcbucket/kv1.txt Partition input format: org.apache.hadoop.mapred.TextInputFormat @@ -67,7 +67,7 @@ serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcbucket + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcbucket serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: srcbucket @@ -75,7 +75,7 @@ Move Operator tables: replace: true - source: /tmp/hive-zshao/444520495/211065154.10000.insclause-0 + source: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/390012861/922014023.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -88,7 +88,7 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 Index: ql/src/test/results/clientpositive/join0.q.out =================================================================== --- ql/src/test/results/clientpositive/join0.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join0.q.out (working copy) @@ -13,7 +13,7 @@ src2:src Filter Operator predicate: - expr: (key < 10) + expr: (UDFToDouble(key) < UDFToDouble(10)) type: boolean Select Operator expressions: @@ -32,7 +32,7 @@ src1:src Filter Operator predicate: - expr: (key < 10) + expr: (UDFToDouble(key) < UDFToDouble(10)) type: boolean Select Operator expressions: @@ -75,7 +75,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/1608930740/15144587.10002 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/143976251/534107059.10002 Reduce Output Operator key expressions: expr: 0 Index: ql/src/test/results/clientpositive/cluster.q.out =================================================================== --- ql/src/test/results/clientpositive/cluster.q.out (revision 736746) +++ ql/src/test/results/clientpositive/cluster.q.out (working copy) @@ -12,7 +12,7 @@ x Filter Operator predicate: - expr: (key = 10) + expr: (UDFToDouble(key) = UDFToDouble(10)) type: boolean Select Operator expressions: @@ -62,7 +62,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -112,7 +112,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -162,7 +162,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = 
UDFToDouble(20)) type: boolean Select Operator expressions: @@ -212,7 +212,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -262,7 +262,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -312,7 +312,7 @@ x Filter Operator predicate: - expr: (key = 20) + expr: (UDFToDouble(key) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -384,7 +384,7 @@ Extract Filter Operator predicate: - expr: (0 = 20) + expr: (UDFToDouble(0) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -457,7 +457,7 @@ 1 {VALUE.0} Filter Operator predicate: - expr: (0 = 20) + expr: (UDFToDouble(0) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -477,7 +477,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/4095681/630596716.10002 + /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/268129150/10750666.10002 Reduce Output Operator key expressions: expr: 1 @@ -559,7 +559,7 @@ 1 {VALUE.0} {VALUE.1} Filter Operator predicate: - expr: (0 = 20) + expr: (UDFToDouble(0) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -581,7 +581,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/462292647/163669153.10002 + /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/223878920/107499083.10002 Reduce Output Operator key expressions: expr: 1 @@ -665,7 +665,7 @@ 1 {VALUE.0} {VALUE.1} Filter Operator predicate: - expr: (0 = 20) + expr: (UDFToDouble(0) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -687,7 +687,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/72781939/1364102870.10002 + /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/273045729/75938714.10002 Reduce Output Operator key expressions: expr: 0 @@ -773,7 +773,7 @@ 1 {VALUE.0} Filter Operator predicate: - expr: (0 = 20) + expr: (UDFToDouble(0) = UDFToDouble(20)) type: boolean Select Operator expressions: @@ -793,7 +793,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/808635/840293573.10002 + /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/294539045/192652633.10002 Reduce Output Operator key expressions: expr: 0 @@ -838,7 +838,7 @@ null-subquery1:unioninput-subquery1:src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -869,7 +869,7 @@ null-subquery2:unioninput-subquery2:src Filter Operator predicate: - expr: (key > 100) + expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join4.q.out =================================================================== --- ql/src/test/results/clientpositive/join4.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -12,7 +12,7 @@ c:b:src2 Filter Operator predicate: - expr: ((key > 15) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: @@ -37,7 +37,7 @@ c:a:src1 Filter Operator predicate: - expr: ((key > 10) and (key < 20)) + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) type: boolean Select 
Operator expressions: Index: ql/src/test/results/clientpositive/join8.q.out =================================================================== --- ql/src/test/results/clientpositive/join8.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join8.q.out (working copy) @@ -12,7 +12,7 @@ c:b:src2 Filter Operator predicate: - expr: ((key > 15) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: @@ -37,7 +37,7 @@ c:a:src1 Filter Operator predicate: - expr: ((key > 10) and (key < 20)) + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input9.q.out =================================================================== --- ql/src/test/results/clientpositive/input9.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -26,7 +26,7 @@ type: string Select Operator expressions: - expr: UDFToString(UDFToBoolean(0)) + expr: UDFToString(0) type: string expr: UDFToInteger(1) type: int Index: ql/src/test/results/clientpositive/udf3.q.out =================================================================== --- ql/src/test/results/clientpositive/udf3.q.out (revision 736746) +++ ql/src/test/results/clientpositive/udf3.q.out (working copy) @@ -24,11 +24,11 @@ Reduce Operator Tree: Group By Operator aggregations: - expr: max(UDFToDouble(VALUE.0)) + expr: max(VALUE.0) expr: avg(UDFToDouble(VALUE.0)) expr: count(VALUE.0) expr: sum(UDFToDouble(VALUE.0)) - expr: min(UDFToDouble(VALUE.0)) + expr: min(VALUE.0) mode: partial1 File Output Operator compressed: false @@ -40,13 +40,13 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/546652046/1647731334.10001 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/68854306/763012378.10001 Reduce Output Operator sort order: tag: -1 value expressions: expr: 0 - type: double + type: int expr: 1 type: string expr: 2 @@ -54,12 +54,12 @@ expr: 3 type: double expr: 4 - type: double + type: int Reduce Operator Tree: Group By Operator aggregations: expr: max(VALUE.0) - expr: avg(VALUE.1) + expr: avg(UDFToDouble(VALUE.1)) expr: count(VALUE.2) expr: sum(VALUE.3) expr: min(VALUE.4) @@ -73,9 +73,9 @@ expr: 1 type: double expr: 4 - type: double + type: int expr: 0 - type: double + type: int File Output Operator compressed: false table: Index: ql/src/test/results/clientpositive/noalias_subq1.q.out =================================================================== --- ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/noalias_subq1.q.out (working copy) @@ -18,7 +18,7 @@ type: string Filter Operator predicate: - expr: (1 < 100) + expr: (UDFToDouble(1) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input11.q.out =================================================================== --- ql/src/test/results/clientpositive/input11.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input11.q.out (working copy) @@ -12,7 +12,7 @@ src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 736746) +++ 
ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -45,7 +45,7 @@ type: string Filter Operator predicate: - expr: (0 > 100) + expr: (UDFToDouble(0) > UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -12,7 +12,7 @@ srcpart Filter Operator predicate: - expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) + expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) and (hr = '12')) type: boolean Select Operator expressions: @@ -36,7 +36,7 @@ type: string File Output Operator compressed: false - directory: /tmp/hive-zshao/591856793/204210378.10000.insclause-0 + directory: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/108623150/305040120.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -49,12 +49,12 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 Filter Operator predicate: - expr: (((key < 100) and (ds = '2008-04-09')) and (hr = '12')) + expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-09')) and (hr = '12')) type: boolean Select Operator expressions: @@ -78,7 +78,7 @@ type: string File Output Operator compressed: false - directory: /tmp/hive-zshao/591856793/204210378.10001.insclause-1 + directory: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/108623150/305040120.10001.insclause-1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -91,15 +91,15 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest2 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest2 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Path -> Partition: - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -117,10 +117,10 @@ 
serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: srcpart - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 Partition partition values: ds 2008-04-09 @@ -138,7 +138,7 @@ serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: srcpart @@ -146,7 +146,7 @@ Move Operator tables: replace: true - source: /tmp/hive-zshao/591856793/204210378.10000.insclause-0 + source: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/108623150/305040120.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -159,11 +159,11 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 replace: true - source: /tmp/hive-zshao/591856793/204210378.10001.insclause-1 + source: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/108623150/305040120.10001.insclause-1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -176,7 +176,7 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest2 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest2 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest2 Index: ql/src/test/results/clientpositive/subq.q.out =================================================================== --- ql/src/test/results/clientpositive/subq.q.out (revision 736746) +++ ql/src/test/results/clientpositive/subq.q.out (working copy) @@ -12,7 +12,7 @@ unioninput:src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/groupby3.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby3.q.out 
(revision 736746) +++ ql/src/test/results/clientpositive/groupby3.q.out (working copy) @@ -30,8 +30,8 @@ expr: avg(DISTINCT UDFToDouble(KEY.0)) expr: sum(UDFToDouble(KEY.0)) expr: avg(UDFToDouble(KEY.0)) - expr: min(UDFToDouble(KEY.0)) - expr: max(UDFToDouble(KEY.0)) + expr: min(KEY.0) + expr: max(KEY.0) mode: partial1 File Output Operator compressed: false @@ -43,7 +43,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/695280947/659390410.10001 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/191882138/597396542.10001 Reduce Output Operator sort order: tag: -1 @@ -55,15 +55,15 @@ expr: 2 type: string expr: 3 - type: double + type: string expr: 4 - type: double + type: string Reduce Operator Tree: Group By Operator aggregations: - expr: avg(VALUE.0) + expr: avg(UDFToDouble(VALUE.0)) expr: sum(VALUE.1) - expr: avg(VALUE.2) + expr: avg(UDFToDouble(VALUE.2)) expr: min(VALUE.3) expr: max(VALUE.4) mode: final @@ -76,16 +76,28 @@ expr: 0 type: double expr: 4 - type: double + type: string expr: 3 - type: double - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe - name: dest1 + type: string + Select Operator + expressions: + expr: 0 + type: double + expr: 1 + type: double + expr: 2 + type: double + expr: UDFToDouble(3) + type: double + expr: UDFToDouble(4) + type: double + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe + name: dest1 Stage: Stage-0 Move Operator @@ -98,4 +110,4 @@ name: dest1 -130091.0 260.182 256.10355987055016 498.0 0.0 +130091.0 NULL NULL 98.0 98.0 Index: ql/src/test/results/clientpositive/input_part6.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part6.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input_part6.q.out (working copy) @@ -12,7 +12,7 @@ x Filter Operator predicate: - expr: (ds = ((2008 - 4) - 8)) + expr: (UDFToDouble(ds) = UDFToDouble(((2008 - 4) - 8))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/subq2.q.out =================================================================== --- ql/src/test/results/clientpositive/subq2.q.out (revision 736746) +++ ql/src/test/results/clientpositive/subq2.q.out (working copy) @@ -45,7 +45,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /tmp/hive-zshao/368989435/823759952.10002 + /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/217677179/137363579.10002 Reduce Output Operator key expressions: expr: 0 @@ -74,7 +74,7 @@ type: bigint Filter Operator predicate: - expr: (0 >= 90) + expr: (UDFToDouble(0) >= UDFToDouble(90)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input14_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input14_limit.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input14_limit.q.out (working copy) @@ -49,7 +49,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - /data/users/pchakka/workspace/oshive/build/ql/tmp/557187098/238412176.10001 + 
/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/456652865/144848153.10001 Reduce Output Operator key expressions: expr: 0 @@ -70,7 +70,7 @@ Limit Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/implicit_cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 0) +++ ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 0) @@ -0,0 +1,33 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF implicit_test1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF implicit_test1))) (TOK_WHERE (<> (TOK_COLREF implicit_test1 a) 0)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + implicit_test1 + Filter Operator + predicate: + expr: (UDFToDouble(a) <> UDFToDouble(0)) + type: boolean + Select Operator + expressions: + expr: a + type: bigint + expr: b + type: string + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + Index: ql/src/test/results/clientpositive/join7.q.out =================================================================== --- ql/src/test/results/clientpositive/join7.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join7.q.out (working copy) @@ -12,7 +12,7 @@ c:b:src2 Filter Operator predicate: - expr: ((key > 15) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(15)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: @@ -37,7 +37,7 @@ c:a:src1 Filter Operator predicate: - expr: ((key > 10) and (key < 20)) + expr: ((UDFToDouble(key) > UDFToDouble(10)) and (UDFToDouble(key) < UDFToDouble(20))) type: boolean Select Operator expressions: @@ -62,7 +62,7 @@ c:c:src3 Filter Operator predicate: - expr: ((key > 20) and (key < 25)) + expr: ((UDFToDouble(key) > UDFToDouble(20)) and (UDFToDouble(key) < UDFToDouble(25))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input8.q.out =================================================================== --- ql/src/test/results/clientpositive/input8.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input8.q.out (working copy) @@ -16,19 +16,19 @@ type: string Select Operator expressions: - expr: (4 + null) - type: int + expr: (UDFToDouble(4) + null) + type: double expr: (UDFToDouble(0) - null) type: double expr: (null + null) - type: tinyint + type: double Select Operator expressions: expr: UDFToString(0) type: string expr: UDFToInteger(1) type: int - expr: UDFToDouble(2) + expr: 2 type: double File Output Operator compressed: false Index: ql/src/test/results/clientpositive/union.q.out =================================================================== --- ql/src/test/results/clientpositive/union.q.out (revision 736746) +++ ql/src/test/results/clientpositive/union.q.out (working copy) @@ -12,7 +12,7 @@ null-subquery1:unioninput-subquery1:src Filter Operator predicate: - expr: (key < 100) + expr: (UDFToDouble(key) < UDFToDouble(100)) type: boolean Select Operator expressions: @@ -34,7 +34,7 @@ null-subquery2:unioninput-subquery2:src Filter Operator predicate: - expr: (key > 100) + 
expr: (UDFToDouble(key) > UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/join13.q.out =================================================================== --- ql/src/test/results/clientpositive/join13.q.out (revision 736746) +++ ql/src/test/results/clientpositive/join13.q.out (working copy) @@ -40,7 +40,7 @@ type: string Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Reduce Output Operator key expressions: @@ -101,7 +101,7 @@ type: string Filter Operator predicate: - expr: (0 < 200) + expr: (UDFToDouble(0) < UDFToDouble(200)) type: boolean Reduce Output Operator key expressions: Index: ql/src/test/results/clientpositive/input14.q.out =================================================================== --- ql/src/test/results/clientpositive/input14.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -39,7 +39,7 @@ Extract Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -12,7 +12,7 @@ srcpart Filter Operator predicate: - expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) + expr: (((UDFToDouble(key) < UDFToDouble(100)) and (ds = '2008-04-08')) and (hr = '12')) type: boolean Select Operator expressions: @@ -36,7 +36,7 @@ type: string File Output Operator compressed: false - directory: /tmp/hive-zshao/293656751/393363001.10000.insclause-0 + directory: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/1332023401/49833095.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -49,14 +49,14 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 Needs Tagging: false Path -> Alias: - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Path -> Partition: - file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition partition values: ds 2008-04-08 @@ -74,7 +74,7 @@ serialization.lib org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/srcpart + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/srcpart serde: 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe name: srcpart @@ -82,7 +82,7 @@ Move Operator tables: replace: true - source: /tmp/hive-zshao/293656751/393363001.10000.insclause-0 + source: /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/1332023401/49833095.10000.insclause-0 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -95,7 +95,7 @@ serialization.lib org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat - location file:/data/users/zshao/sync/apache-trunk-HIVE-104/build/ql/test/data/warehouse/dest1 + location file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe name: dest1 Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) @@ -43,7 +43,7 @@ Extract Filter Operator predicate: - expr: (0 < 100) + expr: (UDFToDouble(0) < UDFToDouble(100)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part5.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input_part5.q.out (working copy) @@ -12,7 +12,7 @@ x Filter Operator predicate: - expr: ((ds = '2008-04-08') and (key < 100)) + expr: ((ds = '2008-04-08') and (UDFToDouble(key) < UDFToDouble(100))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/input2_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input2_limit.q.out (revision 736746) +++ ql/src/test/results/clientpositive/input2_limit.q.out (working copy) @@ -12,7 +12,7 @@ x Filter Operator predicate: - expr: (key < 300) + expr: (UDFToDouble(key) < UDFToDouble(300)) type: boolean Select Operator expressions: Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 736746) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -31,7 +31,7 @@ true - /tmp/hive-njain/104100889/1367774772.10000.insclause-0 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10000.insclause-0 @@ -80,7 +80,7 @@ location - file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 @@ -100,7 +100,7 @@ true - /tmp/hive-njain/104100889/1367774772.10001.insclause-1 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10001.insclause-1 @@ -149,7 +149,7 @@ location - file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest2 @@ -178,7 +178,7 @@ true - /tmp/hive-njain/104100889/1367774772.10002.insclause-2 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10002.insclause-2 @@ -231,7 +231,7 @@ location - 
file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest3 @@ -275,7 +275,7 @@ - /tmp/hive-njain/104100889/1367774772.10000.insclause-0 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10000.insclause-0 @@ -382,14 +382,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPLessThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -397,33 +397,85 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + + + java.lang.Double + + - - - - - java.lang.Integer + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + java.lang.Integer + + + + + 100 + + - - 100 + + - + java.lang.Boolean @@ -483,7 +535,7 @@ - /tmp/hive-njain/104100889/1367774772.10001.insclause-1 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10001.insclause-1 @@ -603,14 +655,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPEqualOrGreaterThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -618,29 +670,77 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + - + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 100 + + + + + - - 100 - - + @@ -650,14 +750,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPLessThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -665,36 +765,84 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + - + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 200 + + + + + - - 200 - - + - + @@ -729,7 +877,7 @@ - /tmp/hive-njain/104100889/1367774772.10002.insclause-2 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/238674319/24398083.10002.insclause-2 @@ -763,7 +911,7 @@ 1 - + @@ -792,7 +940,7 @@ - + 2 @@ -829,14 +977,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPEqualOrGreaterThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -844,29 +992,77 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + - + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 200 + + + + + - - 200 - - + @@ -904,7 +1100,7 @@ - file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/src src @@ -916,7 +1112,7 @@ - file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/src @@ -968,7 +1164,7 @@ location - file:/home/njain/workspace/hadoophive/trunk/build/ql/test/data/warehouse/src + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/src Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 736746) +++ 
ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -28,7 +28,7 @@ true - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10003.insclause-3 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10003.insclause-3 ../../../../build/contrib/hive/ql/test/data/warehouse/dest4.out @@ -48,7 +48,7 @@ true - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10000.insclause-0 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10000.insclause-0 @@ -97,7 +97,7 @@ location - file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest1 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest1 @@ -117,7 +117,7 @@ true - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10001.insclause-1 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10001.insclause-1 @@ -166,7 +166,7 @@ location - file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest2 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest2 @@ -195,7 +195,7 @@ true - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10002.insclause-2 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10002.insclause-2 @@ -248,7 +248,7 @@ location - file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest3 + file:/data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/test/data/warehouse/dest3 @@ -292,7 +292,7 @@ - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10000.insclause-0 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10000.insclause-0 @@ -399,14 +399,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPLessThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -414,33 +414,85 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + + + java.lang.Double + + - - - - - java.lang.Integer + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + java.lang.Integer + + + + + 100 + + - - 100 + + - + java.lang.Boolean @@ -500,7 +552,7 @@ - /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/382185459/89077292.10001.insclause-1 + /data/users/athusoo/apacheprojects/hive_local_ws3/ql/../build/ql/tmp/48576025/763323393.10001.insclause-1 @@ -620,14 +672,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPEqualOrGreaterThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -635,29 +687,77 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + - + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 100 + + + + + - - 100 - - + @@ -667,14 +767,14 @@ org.apache.hadoop.hive.ql.udf.UDFOPLessThan - + evaluate - java.lang.String + java.lang.Double - java.lang.Number + java.lang.Double @@ -682,36 +782,84 @@ - - - key + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + evaluate + + + java.lang.String + + + + + + + + + + key + + + + + + + + - + - + + + org.apache.hadoop.hive.ql.udf.UDFToDouble + + + + evaluate + + + java.lang.Integer + + + + + + + + + + + + + 200 + + + + + - - 
[Editor's note: the XML markup of the regenerated query-plan result files in this part of the patch was stripped during extraction, so these hunks cannot be reproduced verbatim. Every surviving fragment shows the same three mechanical changes, repeated file by file: (1) the comparison UDFs (UDFOPEqual, UDFOPLessThan, UDFOPGreaterThan, UDFOPEqualOrGreaterThan) now resolve evaluate(java.lang.Double, java.lang.Double) instead of evaluate(java.lang.String, java.lang.Number); (2) each comparison operand — a string column such as key, or an integer literal — is wrapped in a new org.apache.hadoop.hive.ql.udf.UDFToDouble function node; (3) the generated tmp and warehouse paths move from the earlier committers' workspaces (/data/users/pchakka/workspace/oshive, /data/users/zshao/sync/apache-trunk-HIVE-104, /home/njain/workspace/hadoophive/trunk, /tmp/hive-njain, /tmp/hive-zshao) to /data/users/athusoo/apacheprojects/hive_local_ws3. Per-file hunks, with their distinguishing literals, follow.]

[Tail of the preceding plan file's diff: the key >= 200 / key < 300 filters and the key >= 300 filter of the later insert clause gain UDFToDouble wrappers on both operands; the .10002.insclause-2 and .10003.insclause-3 output paths and the warehouse/src locations are regenerated.]

Index: ql/src/test/results/compiler/plan/join4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join4.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/join4.q.xml	(working copy)
[Filters key > 15 / key < 25 and key > 10 / key < 20 rewritten with UDFToDouble on both operands; src warehouse and .10001.insclause-0 tmp paths regenerated.]

Index: ql/src/test/results/compiler/plan/input4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input4.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/input4.q.xml	(working copy)
[Filter 0 < 100 over the transform output column rewritten with UDFToDouble; dest1 and src warehouse paths regenerated.]

Index: ql/src/test/results/compiler/plan/join5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join5.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/join5.q.xml	(working copy)
[Same 15/25 and 10/20 filter rewrites and path regeneration as join4.q.xml.]

Index: ql/src/test/results/compiler/plan/join6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join6.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/join6.q.xml	(working copy)
[Same 15/25 and 10/20 filter rewrites and path regeneration as join4.q.xml.]

Index: ql/src/test/results/compiler/plan/join7.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join7.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/join7.q.xml	(working copy)
[Same 15/25 and 10/20 rewrites, plus the third join input's key > 20 / key < 25 filters; paths regenerated.]

Index: ql/src/test/results/compiler/plan/input8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input8.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/input8.q.xml	(working copy)
[So far as the fragments show: the arithmetic UDF now resolves evaluate(Double) forms instead of evaluate(Integer)/evaluate(Byte); the literal 4 gains a UDFToDouble wrapper; the explicit Integer/Double/Byte select column types collapse to the implicit double results; src1 warehouse paths regenerated.]

Index: ql/src/test/results/compiler/plan/join8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join8.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/join8.q.xml	(working copy)
[Same 15/25 and 10/20 filter rewrites as join4.q.xml, plus retyped null-check predicates in the later hunks; paths regenerated.]

Index: ql/src/test/results/compiler/plan/input9.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input9.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/input9.q.xml	(working copy)
[The null-cast expression now resolves evaluate(Double) instead of evaluate(String); the /tmp/hive-njain insclause-0 output and src1 warehouse paths regenerated.]

Index: ql/src/test/results/compiler/plan/union.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/union.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/union.q.xml	(working copy)
[Filters key > 100 and key < 100 on the two union branches rewritten with UDFToDouble; union.out output and src warehouse paths regenerated.]

Index: ql/src/test/results/compiler/plan/udf1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/udf1.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/udf1.q.xml	(working copy)
[Predicate 0 = 86 (UDFOPEqual) rewritten with UDFToDouble on both sides; paths regenerated.]

Index: ql/src/test/results/compiler/plan/input_part1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input_part1.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/input_part1.q.xml	(working copy)
[Filter key < 100 rewritten with UDFToDouble; srcpart partition (ds=2008-04-08/hr=12) paths regenerated.]

Index: ql/src/test/results/compiler/plan/groupby3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/groupby3.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/groupby3.q.xml	(working copy)
[The distinctive hunks in this file: min/max over KEY.0 switch from UDFToDouble-wrapped UDAFMin/UDAFMax aggregations to the string evaluators UDAFMin$MinStringEvaluator and UDAFMax$MaxStringEvaluator; columns 3 and 4 of the intermediate binary_table serialization.ddl change from double to string accordingly; the sum/avg inputs VALUE.0 and VALUE.2 gain UDFToDouble wrappers on the reduce side; paths regenerated.]

Index: ql/src/test/results/compiler/plan/subq.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/subq.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/subq.q.xml	(working copy)
[Filter key < 100 rewritten with UDFToDouble; union.out output and src warehouse paths regenerated.]

Index: ql/src/test/results/compiler/plan/sample7.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample7.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/sample7.q.xml	(working copy)
[Filter key > 100 rewritten with UDFToDouble; the bucket-sampling constants (2147483647, 4, 0) retyped; srcbucket/kv1.txt and dest1 paths regenerated.]

Index: ql/src/test/results/compiler/plan/cast1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/cast1.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/cast1.q.xml	(working copy)
[Predicate 0 = 86 rewritten with UDFToDouble on both sides — the string literal '0' via evaluate(String), the integer literal 86 via evaluate(Integer); paths regenerated.]
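[Editor's aside — a minimal sketch, not part of the patch, of the expression tree the regenerated plans above encode. For a predicate such as src.key < 100 over a string column, both operands are first converted with UDFToDouble and the comparison UDF is then resolved at (Double, Double). The construction mirrors getTestFilterDesc in the TestExecDriver.java hunk below; it assumes the org.apache.hadoop.hive.ql.* imports used throughout this patch.]

// Sketch only: builds UDFOPLessThan(UDFToDouble(key), UDFToDouble(100))
// with the plan-description classes that appear in this patch.
ArrayList<exprNodeDesc> keyChildren = new ArrayList<exprNodeDesc>();
keyChildren.add(new exprNodeColumnDesc(
    TypeInfoFactory.getPrimitiveTypeInfo(String.class), "key"));
exprNodeDesc keyAsDouble = new exprNodeFuncDesc(
    TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
    FunctionRegistry.getUDFClass(Double.class.getName()),        // resolves UDFToDouble
    FunctionRegistry.getUDFMethod(Double.class.getName(), String.class),
    keyChildren);

ArrayList<exprNodeDesc> constChildren = new ArrayList<exprNodeDesc>();
constChildren.add(new exprNodeConstantDesc(
    TypeInfoFactory.getPrimitiveTypeInfo(Integer.class), Integer.valueOf(100)));
exprNodeDesc constAsDouble = new exprNodeFuncDesc(
    TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
    FunctionRegistry.getUDFClass(Double.class.getName()),
    FunctionRegistry.getUDFMethod(Double.class.getName(), Integer.class),
    constChildren);

ArrayList<exprNodeDesc> cmpChildren = new ArrayList<exprNodeDesc>();
cmpChildren.add(keyAsDouble);
cmpChildren.add(constAsDouble);
exprNodeDesc lessThan = new exprNodeFuncDesc(
    TypeInfoFactory.getPrimitiveTypeInfo(Boolean.class),
    FunctionRegistry.getUDFClass("<"),
    FunctionRegistry.getUDFMethod("<", Double.class, Double.class),
    cmpChildren);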
Index: ql/src/test/results/compiler/plan/input1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/input1.q.xml	(revision 736746)
+++ ql/src/test/results/compiler/plan/input1.q.xml	(working copy)
[Filter key < 100 rewritten with UDFToDouble on both operands; dest1 and src warehouse paths and the .10000.insclause-0 tmp path regenerated, as above.]

Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java	(revision 736746)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java	(working copy)
@@ -146,16 +146,32 @@
 
   private filterDesc getTestFilterDesc(String column) {
-    ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
-    children.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
-    children.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Number.class), Long.valueOf(100)));
-
+    ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
+    children1.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
+    exprNodeDesc lhs = new exprNodeFuncDesc(
+        TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
+        FunctionRegistry.getUDFClass(Double.class.getName()),
+        FunctionRegistry.getUDFMethod(Double.class.getName(), String.class),
+        children1);
+
+    ArrayList<exprNodeDesc> children2 = new ArrayList<exprNodeDesc>();
+    children2.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Long.class), Long.valueOf(100)));
+    exprNodeDesc rhs = new exprNodeFuncDesc(
+        TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
+        FunctionRegistry.getUDFClass(Double.class.getName()),
+        FunctionRegistry.getUDFMethod(Double.class.getName(), Long.class),
+        children2);
+
+    ArrayList<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
+    children3.add(lhs);
+    children3.add(rhs);
+
     exprNodeDesc desc = new exprNodeFuncDesc(
       TypeInfoFactory.getPrimitiveTypeInfo(Boolean.class),
       FunctionRegistry.getUDFClass("<"),
-      FunctionRegistry.getUDFMethod("<", true, String.class, Number.class),
-      children
-      );
+      FunctionRegistry.getUDFMethod("<", Double.class, Double.class),
+      children3);
+
     return new filterDesc(desc);
   }
 
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java	(revision 736746)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java	(working copy)
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
@@ -111,7 +112,7 @@
     exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
     exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
     exprNodeDesc cola0desc = new exprNodeIndexDesc(coladesc, new exprNodeConstantDesc(new Integer(0)));
-    exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("concat", col11desc, cola0desc);
+    exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc);
     ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
     // evaluate on row
@@ -130,7 +131,7 @@
     // get a evaluator for a string concatenation expression
     exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
     exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
-    exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(Double.class.getName(), col11desc);
+    exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Double.class.getName(), col11desc);
     ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
     // evaluate on row
@@ -164,7 +165,7 @@
     measureSpeed("1 + 2",
         basetimes * 100,
         ExprNodeEvaluatorFactory.get(
-            SemanticAnalyzer.getFuncExprNodeDesc("+",
+            TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
                 new exprNodeConstantDesc(1),
                 new exprNodeConstantDesc(2))),
         r,
[The remaining hunks of this file (@@ -172 through @@ -244 in the original) apply the same one-for-one substitution — SemanticAnalyzer.getFuncExprNodeDesc becomes TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc — at every nested call site of the measureSpeed cases: "1 + 2 - 3", "1 + 2 - 3 + 4", concat("1", "2") and its two deeper nestings, and the three concat cases over the col1[...]/cola[...] index expressions.]
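[Editor's aside — a minimal usage sketch, assuming only what the hunks above show: the expression-building entry point moved from SemanticAnalyzer to TypeCheckProcFactory.DefaultExprProcessor, while evaluators still come from ExprNodeEvaluatorFactory.]

// Sketch only: build concat('1', '2') with the relocated factory and obtain
// an evaluator for it, exactly as the updated tests do.
// (Assumes the org.apache.hadoop.hive.ql.* imports used throughout this patch.)
exprNodeDesc concatExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(
    "concat",
    new exprNodeConstantDesc("1"),
    new exprNodeConstantDesc("2"));
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(concatExpr);
// The evaluator is then run against rows via an InspectableObject, as in
// TestExpressionEvaluator above.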
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java	(revision 736746)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java	(working copy)
@@ -26,6 +26,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -44,7 +45,7 @@
       // initialize a complete map reduce configuration
       exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F1);
       exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F2);
-      exprNodeDesc filterExpr = SemanticAnalyzer.getFuncExprNodeDesc("==", expr1, expr2);
+      exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2);
 
       filterDesc filterCtx = new filterDesc(filterExpr);
       Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java	(revision 736746)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java	(working copy)
@@ -29,6 +29,7 @@
 
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -74,10 +75,10 @@
       exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col1");
       exprNodeDesc col2 = new exprNodeColumnDesc(String.class, "col2");
-      exprNodeDesc zero = new exprNodeConstantDesc(Number.class, Long.valueOf(0));
-      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(">", col2, col1);
-      exprNodeDesc func2 = SemanticAnalyzer.getFuncExprNodeDesc("==", col0, zero);
-      exprNodeDesc func3 = SemanticAnalyzer.getFuncExprNodeDesc("&&", func1, func2);
+      exprNodeDesc zero = new exprNodeConstantDesc(String.class, "0");
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
+      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
+      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2);
       assert(func3 != null);
 
       filterDesc filterCtx = new filterDesc(func3);
@@ -122,7 +123,7 @@
       ArrayList<exprNodeDesc> exprDesc2children = new ArrayList<exprNodeDesc>();
       exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
       ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
@@ -167,7 +168,7 @@
       // col2
       exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
       ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java	(revision 736746)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java	(working copy)
@@ -23,7 +23,7 @@
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
-public class UDFTestLength implements UDF {
+public class UDFTestLength extends UDF {
 
   public Integer evaluate(String s) {
     return s == null ? null : s.length();
   }
Index: ql/src/test/queries/clientpositive/implicit_cast1.q
===================================================================
--- ql/src/test/queries/clientpositive/implicit_cast1.q	(revision 0)
+++ ql/src/test/queries/clientpositive/implicit_cast1.q	(revision 0)
@@ -0,0 +1,13 @@
+CREATE TABLE implicit_test1(a BIGINT, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES('serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol') STORED AS TEXTFILE;
+
+EXPLAIN
+SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0;
+
+SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0;
+
+DROP TABLE implicit_test1;
+
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java	(working copy)
@@ -55,7 +55,7 @@
  * Processor Context for creating map reduce task. Walk the tree in a DFS manner and process the nodes. Some state is
  * maintained about the current nodes visited so far.
  */
-public class GenMRProcContext extends NodeProcessorCtx {
+public class GenMRProcContext implements NodeProcessorCtx {
 
   /**
    * GenMapRedCtx is used to keep track of the current state.
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java	(working copy)
@@ -41,10 +41,11 @@
    * @param nd the reduce sink operator encountered
    * @param procCtx context
    */
-  public void process(Node nd, NodeProcessorCtx procCtx) throws SemanticException {
+  public Object process(Node nd, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
     GenMRProcContext ctx = (GenMRProcContext)procCtx;
 
     Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
     mapCurrCtx.put((Operator<? extends Serializable>)nd, new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrTopOp(), ctx.getCurrAliasId()));
+    return null;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java	(working copy)
@@ -54,7 +54,7 @@
    * Node Processor for Column Pruning on Filter Operators.
    */
   public static class ColumnPrunerFilterProc implements NodeProcessor {
-    public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException {
+    public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
       FilterOperator op = (FilterOperator)nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx)ctx;
       exprNodeDesc condn = op.getConf().getPredicate();
@@ -62,6 +62,7 @@
       List<String> cl = condn.getCols();
       // merge it with the downstream col list
       cppCtx.getPrunedColLists().put(op, Utilities.mergeUniqElems(cppCtx.genColLists(op), cl));
+      return null;
     }
   }
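[Editor's aside — the hunks above and below all retrofit node processors to a new contract: process() now returns an Object and receives the results of already-processed nodes as varargs. The NodeProcessor interface itself is not in this excerpt, so the sketch below is an inference from these call sites, not patch code.]

// Sketch only (assumed interface shape): a no-op processor under the new contract.
// (Assumes the org.apache.hadoop.hive.ql.* imports used throughout this patch.)
public class NoopProcessor implements NodeProcessor {
  public Object process(Node nd, NodeProcessorCtx procCtx, Object... nodeOutputs)
      throws SemanticException {
    // Inspect nd and stash any state in procCtx; the returned value becomes
    // this node's output, visible to later processors via nodeOutputs.
    return null;
  }
}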
@@ -77,7 +78,7 @@
    * Node Processor for Column Pruning on Group By Operators.
    */
   public static class ColumnPrunerGroupByProc implements NodeProcessor {
-    public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException {
+    public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
       GroupByOperator op = (GroupByOperator)nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx)ctx;
       List<String> colLists = new ArrayList<String>();
@@ -94,6 +95,7 @@
       }
 
       cppCtx.getPrunedColLists().put(op, colLists);
+      return null;
     }
   }
 
@@ -109,10 +111,12 @@
    * The Default Node Processor for Column Pruning.
    */
   public static class ColumnPrunerDefaultProc implements NodeProcessor {
-    public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException {
+    public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx)ctx;
       cppCtx.getPrunedColLists().put((Operator<? extends Serializable>)nd,
           cppCtx.genColLists((Operator<? extends Serializable>)nd));
+
+      return null;
     }
   }
 
@@ -128,7 +132,7 @@
    * The Node Processor for Column Pruning on Reduce Sink Operators.
    */
   public static class ColumnPrunerReduceSinkProc implements NodeProcessor {
-    public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException {
+    public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
       ReduceSinkOperator op = (ReduceSinkOperator)nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx)ctx;
       HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap =
@@ -171,6 +175,7 @@
       }
 
       cppCtx.getPrunedColLists().put(op, colLists);
+      return null;
     }
   }
 
@@ -186,7 +191,7 @@
    * The Node Processor for Column Pruning on Select Operators.
    */
   public static class ColumnPrunerSelectProc implements NodeProcessor {
-    public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException {
+    public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
       SelectOperator op = (SelectOperator)nd;
       ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx)ctx;
       List<String> cols = new ArrayList<String>();
@@ -198,7 +203,7 @@
         // which should be fixed before remove this
         if ((child instanceof FileSinkOperator) || (child instanceof ScriptOperator)) {
           cppCtx.getPrunedColLists().put(op, cppCtx.getColsFromSelectExpr(op));
-          return;
+          return null;
         }
         cols = Utilities.mergeUniqElems(cols, cppCtx.getPrunedColLists().get(child));
       }
@@ -209,9 +214,10 @@
         // The input to the select does not matter. Go over the expressions
         // and return the ones which have a marked column
         cppCtx.getPrunedColLists().put(op, cppCtx.getSelectColsFromChildren(op, cols));
-        return;
+        return null;
       }
       cppCtx.getPrunedColLists().put(op, cppCtx.getColsFromSelectExpr(op));
+      return null;
     }
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java	(working copy)
@@ -44,7 +44,7 @@
    * @param nd the table sink operator encountered
    * @param opProcCtx context
    */
-  public void process(Node nd, NodeProcessorCtx opProcCtx) throws SemanticException {
+  public Object process(Node nd, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
     TableScanOperator op = (TableScanOperator)nd;
     GenMRProcContext ctx = (GenMRProcContext)opProcCtx;
     ParseContext parseCtx = ctx.getParseCtx();
@@ -62,10 +62,11 @@
         String currAliasId = alias;
         ctx.setCurrAliasId(currAliasId);
         mapCurrCtx.put(op, new GenMapRedCtx(currTask, currTopOp, currAliasId));
-        return;
+        return null;
       }
     }
     assert false;
+    return null;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java	(working copy)
@@ -171,7 +171,7 @@
     // Create a list of topop nodes
     ArrayList<Node> topNodes = new ArrayList<Node>();
     topNodes.addAll(pGraphContext.getTopOps().values());
-    ogw.startWalking(topNodes);
+    ogw.startWalking(topNodes, null);
 
     // create a new select operator if any of input tables' columns can be pruned
     for (String alias_id : pGraphContext.getTopOps().keySet()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java	(working copy)
@@ -45,7 +45,7 @@
    * @param nd the reduce sink operator encountered
    * @param opProcCtx context
    */
-  public void process(Node nd, NodeProcessorCtx opProcCtx) throws SemanticException {
+  public Object process(Node nd, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
     ReduceSinkOperator op = (ReduceSinkOperator)nd;
     GenMRProcContext ctx = (GenMRProcContext)opProcCtx;
 
@@ -79,6 +79,7 @@
     }
 
     mapCurrCtx.put(op, new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrTopOp(), ctx.getCurrAliasId()));
+    return null;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java	(working copy)
@@ -43,7 +43,7 @@
    * @param nd the reduce sink operator encountered
    * @param opProcCtx context
    */
-  public void process(Node nd, NodeProcessorCtx opProcCtx) throws SemanticException {
+  public Object process(Node nd, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
     ReduceSinkOperator op = (ReduceSinkOperator)nd;
     GenMRProcContext ctx = (GenMRProcContext)opProcCtx;
 
@@ -69,6 +69,7 @@
     }
 
     mapCurrCtx.put(op, new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrTopOp(), ctx.getCurrAliasId()));
+    return null;
   }
 }
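[Editor's aside — wiring implied by the ColumnPruner hunk above: contexts now implement NodeProcessorCtx rather than extend it, and startWalking takes a second argument, which ColumnPruner passes as null. GraphWalker, HypotheticalPass, and HypotheticalCtx are illustrative names assumed for this sketch; only the startWalking call shape is taken from the patch.]

// Sketch only: an optimizer pass walking the operator graph under the new API.
// (Assumes the org.apache.hadoop.hive.ql.* imports used throughout this patch.)
public class HypotheticalPass {

  public static class HypotheticalCtx implements NodeProcessorCtx {
    // plain state holder; nothing is inherited now that NodeProcessorCtx
    // is implemented rather than extended
  }

  public void run(GraphWalker ogw, ParseContext pGraphContext) throws SemanticException {
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pGraphContext.getTopOps().values());
    // Second argument added by this patch; ColumnPruner passes null,
    // apparently opting out of collecting per-node outputs.
    ogw.startWalking(topNodes, null);
  }
}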
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java	(working copy)
@@ -36,7 +36,7 @@
 /**
  * This class implements the processor context for Column Pruner.
  */
-public class ColumnPrunerProcCtx extends NodeProcessorCtx {
+public class ColumnPrunerProcCtx implements NodeProcessorCtx {
 
   private Map<Operator<? extends Serializable>, List<String>> prunedColLists;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java	(revision 736746)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java	(working copy)
@@ -44,7 +44,7 @@
    * @param nd the file sink operator encountered
    * @param opProcCtx context
    */
-  public void process(Node nd, NodeProcessorCtx opProcCtx) throws SemanticException {
+  public Object process(Node nd, NodeProcessorCtx opProcCtx, Object... nodeOutputs) throws SemanticException {
     FileSinkOperator op = (FileSinkOperator)nd;
     GenMRProcContext ctx = (GenMRProcContext)opProcCtx;
     boolean ret = false;
@@ -82,5 +82,6 @@
         currTask.removeDependentTask(mvTask);
       }
     }
+    return null;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluator.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluator.java	(revision 0)
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+/**
+ * Interface that encapsulates the evaluation logic of a UDAF. One evaluator is needed
+ * for every overloaded form of a UDAF, e.g. the max and min UDAFs have evaluators for
+ * integer, string and other types, whereas avg has an evaluator only for the
+ * double type.
+ */
+public interface UDAFEvaluator {
+
+  /**
+   * Initializer. Initializes the state for the evaluator.
+   */
+  public void init();
+}
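[Editor's aside — a minimal sketch of a UDAF under the new evaluator contract. The names terminatePartial and terminate follow the rename visible in the GroupByOperator hunk further down ("evaluatePartial" -> "terminatePartial", "evaluate" -> "terminate"); iterate and merge are assumptions not shown in this excerpt. The per-type static inner evaluator matches the UDAFMax$MaxStringEvaluator style seen in the regenerated groupby3 plan.]

// Sketch only: a hypothetical sum UDAF built on the new interfaces.
// (Assumes the org.apache.hadoop.hive.ql.exec imports used throughout this patch.)
public class UDAFHypotheticalSum extends NumericUDAF {

  public static class SumDoubleEvaluator implements UDAFEvaluator {
    private double sum;
    private boolean empty;

    // required by UDAFEvaluator: reset per-group state
    public void init() {
      sum = 0;
      empty = true;
    }

    // assumed aggregate method: consume one row's value
    public boolean iterate(Double o) {
      if (o != null) {
        sum += o.doubleValue();
        empty = false;
      }
      return true;
    }

    // partial result handed between map and reduce stages
    public Double terminatePartial() {
      return empty ? null : Double.valueOf(sum);
    }

    // assumed merge method: fold in another instance's partial result
    public boolean merge(Double o) {
      return iterate(o);
    }

    // final result for the group
    public Double terminate() {
      return empty ? null : Double.valueOf(sum);
    }
  }
}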
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +/** + * Base class of numeric UDAFs like sum and avg which need a NumericUDAFEvaluatorResolver. + */ +public class NumericUDAF extends UDAF { + + /** + * Constructor. + */ + public NumericUDAF() { + setResolver(new NumericUDAFEvaluatorResolver(this.getClass())); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (working copy) @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec; -import java.util.Arrays; import java.util.HashMap; import java.util.ArrayList; import java.util.List; @@ -37,7 +36,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.ql.parse.RowResolver; import org.apache.hadoop.hive.ql.parse.OpParseContext; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.ql.typeinfo.PrimitiveTypeInfo; @@ -62,7 +60,7 @@ // so aggregationIsDistinct is a boolean array instead of a single number. transient protected boolean[] aggregationIsDistinct; - transient Class[] aggregationClasses; + transient Class[] aggregationClasses; transient protected Method[] aggregationsAggregateMethods; transient protected Method[] aggregationsEvaluateMethods; @@ -71,11 +69,11 @@ // Used by sort-based GroupBy: Mode = COMPLETE, PARTIAL1, PARTIAL2 transient protected ArrayList currentKeys; - transient protected UDAF[] aggregations; + transient protected UDAFEvaluator[] aggregations; transient protected Object[][] aggregationsParametersLastInvoke; // Used by hash-based GroupBy: Mode = HASH - transient protected HashMap, UDAF[]> hashAggregations; + transient protected HashMap, UDAFEvaluator[]> hashAggregations; transient boolean firstRow; transient long totalMemory; @@ -139,7 +137,7 @@ } // init aggregationClasses - aggregationClasses = (Class[]) new Class[conf.getAggregators().size()]; + aggregationClasses = (Class[]) new Class[conf.getAggregators().size()]; for (int i = 0; i < conf.getAggregators().size(); i++) { aggregationDesc agg = conf.getAggregators().get(i); aggregationClasses[i] = agg.getAggregationClass(); @@ -151,13 +149,13 @@ aggregationsEvaluateMethods = new Method[aggregationClasses.length]; String evaluateMethodName = ((conf.getMode() == groupByDesc.Mode.PARTIAL1 || conf.getMode() == groupByDesc.Mode.HASH || conf.getMode() == groupByDesc.Mode.PARTIAL2) - ? "evaluatePartial" : "evaluate"); + ? "terminatePartial" : "terminate"); for(int i=0; i, UDAF[]>(); + hashAggregations = new HashMap, UDAFEvaluator[]>(); hashAggr = true; keyPositionsSize = new ArrayList(); } @@ -329,19 +327,20 @@ // fields in these aggregation classes. 
for(int i=0; i < aggregationClasses.length; i++) { fixedRowSize += javaObjectOverHead; - Class agg = aggregationClasses[i]; + Class agg = aggregationClasses[i]; Field[] fArr = agg.getFields(); for (Field f : fArr) fixedRowSize += getSize(i, agg, f); } } - protected UDAF[] newAggregations() throws HiveException { - UDAF[] aggs = new UDAF[aggregationClasses.length]; + protected UDAFEvaluator[] newAggregations() throws HiveException { + UDAFEvaluator[] aggs = new UDAFEvaluator[aggregationClasses.length]; for(int i=0; i newKeys) throws HiveException { // Prepare aggs for updating - UDAF[] aggs = null; + UDAFEvaluator[] aggs = null; boolean newEntry = false; // hash-based aggregations @@ -451,7 +450,7 @@ private void processAggr(Object row, ObjectInspector rowInspector, ArrayList newKeys) throws HiveException { // Prepare aggs for updating - UDAF[] aggs = null; + UDAFEvaluator[] aggs = null; Object[][] lastInvoke = null; boolean keysAreEqual = newKeys.equals(currentKeys); @@ -464,7 +463,7 @@ currentKeys = newKeys; // init aggregations - for(UDAF aggregation: aggregations) + for(UDAFEvaluator aggregation: aggregations) aggregation.init(); // clear parameters in last-invoke @@ -493,14 +492,14 @@ totalVariableSize += ((String)key).length(); } - UDAF[] aggs = null; + UDAFEvaluator[] aggs = null; if (aggrPositions.size() > 0) aggs = hashAggregations.get(newKeys); for (varLenFields v : aggrPositions) { int aggrPos = v.getAggrPos(); List fieldsVarLen = v.getFields(); - UDAF agg = aggs[aggrPos]; + UDAFEvaluator agg = aggs[aggrPos]; try { @@ -532,7 +531,7 @@ Iterator iter = hashAggregations.entrySet().iterator(); int numDel = 0; while (iter.hasNext()) { - Map.Entry, UDAF[]> m = (Map.Entry)iter.next(); + Map.Entry, UDAFEvaluator[]> m = (Map.Entry)iter.next(); forward(m.getKey(), m.getValue()); iter.remove(); numDel++; @@ -548,7 +547,7 @@ * The keys in the record * @throws HiveException */ - protected void forward(ArrayList keys, UDAF[] aggs) throws HiveException { + protected void forward(ArrayList keys, UDAFEvaluator[] aggs) throws HiveException { int totalFields = keys.size() + aggs.length; List a = new ArrayList(totalFields); for(int i=0; i udafClass) { + super(udafClass); + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.ql.exec.UDAFMethodResolver#getEvaluatorClass(java.util.List) + */ + @Override + public Class getEvaluatorClass( + List> argClasses) throws AmbiguousMethodException { + // Go through the argClasses and for any string, void or date time, start looking for doubles + ArrayList> args = new ArrayList>(); + for(Classarg: argClasses) { + if (arg == Void.class || arg == String.class || arg == Date.class) { + args.add(Double.class); + } + else { + args.add(arg); + } + } + + return super.getEvaluatorClass(args); + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java (working copy) @@ -33,6 +33,33 @@ * * "evaluate" should never be a void method. However it can return "null" if needed. */ -public interface UDF { +public class UDF { + + /** + * The resolver to use for method resolution. + */ + private UDFMethodResolver rslv; + /** + * The constructor + */ + public UDF() { + rslv = new DefaultUDFMethodResolver(this.getClass()); + } + + /** + * Sets the resolver + * + * @param The method resolver to use for method resolution. 
+ */ + public void setResolver(UDFMethodResolver rslv) { + this.rslv = rslv; + } + + /** + * Get the method resolver. + */ + public UDFMethodResolver getResolver() { + return rslv; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (revision 0) @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.util.List; + +/** + * The UDAF evaluator resolver interface. A user can plug in a resolver to their UDAF by implementing the + * function in this interface. Note that the resolver is stored in the UDAF class as an instance + * variable. We did not use a static variable because many resolvers maintain the class of the + * enclosing UDAF as state and are installed from a base class, e.g. NumericUDAF. This makes it very + * easy to write UDAFs that want to do resolution the way the numeric aggregates do: such UDAFs + * just need to extend NumericUDAF and do not have to care about the UDAFEvaluatorResolver interface. + * For the default resolution the UDAF + * implementation simply needs to extend the UDAF class. + */ +public interface UDAFEvaluatorResolver { + + /** + * Gets the evaluator class corresponding to the passed parameter list. + */ + Class<? extends UDAFEvaluator> getEvaluatorClass(List<Class<?>> argClasses) + throws AmbiguousMethodException; + +} \ No newline at end of file Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java (revision 0) @@ -0,0 +1,93 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.List; + +/** + * The default UDAF evaluator resolver. Given the list of the argument types, getEvaluatorClass goes + * through all the iterate methods exposed by the UDAF's evaluator classes and returns the evaluator + * whose signature matches the argument signature or is the closest match. + * Closest match is defined as the one that requires the least number of arguments to be converted. + * In case more than one match is found, the method throws an ambiguous method exception. + */ +public class DefaultUDAFEvaluatorResolver implements UDAFEvaluatorResolver { + + /** + * The class of the UDAF. + */ + private Class<? extends UDAF> udafClass; + + /** + * Constructor. + * This constructor sets up the default resolver for the given UDAF class. + * See {@link UDAFEvaluatorResolver} + */ + public DefaultUDAFEvaluatorResolver(Class<? extends UDAF> udafClass) { + this.udafClass = udafClass; + } + + /** + * Gets the evaluator class for the UDAF given the parameter types. + * + * @param argClasses The list of the parameter types. + */ + public Class<? extends UDAFEvaluator> getEvaluatorClass(List<Class<?>> argClasses) + throws AmbiguousMethodException { + + ArrayList<Class<? extends UDAFEvaluator>> classList = new ArrayList<Class<? extends UDAFEvaluator>>(); + + // Add the UDAF class itself if it implements an evaluator + for(Class<?> iface: udafClass.getInterfaces()) { + if (iface == UDAFEvaluator.class) { + Class<? extends UDAFEvaluator> udafClass2 = (Class<? extends UDAFEvaluator>) udafClass; + classList.add(udafClass2); + } + } + + // Add all the public member classes that implement an evaluator + for(Class<?> enclClass: udafClass.getClasses()) { + for(Class<?> iface: enclClass.getInterfaces()) { + if (iface == UDAFEvaluator.class) { + classList.add((Class<? extends UDAFEvaluator>)enclClass); + } + } + } + + // Next we locate all the iterate methods for each of these classes. + ArrayList<Method> mList = new ArrayList<Method>(); + for(Class<? extends UDAFEvaluator> evaluator: classList) { + for(Method m: evaluator.getMethods()) { + if (m.getName().equalsIgnoreCase("iterate")) { + mList.add(m); + } + } + } + + Method m = FunctionRegistry.getMethodInternal(mList, false, argClasses); + if (m == null) { + throw new AmbiguousMethodException(udafClass, argClasses); + } + + return (Class<? extends UDAFEvaluator>)m.getDeclaringClass(); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDFMethodResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDFMethodResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDFMethodResolver.java (revision 0) @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
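Tying this back to the UDAFExampleSum sketch earlier: the resolver gathers the nested evaluator classes and scores their iterate signatures through getMethodInternal, so lookups would behave roughly as follows (a sketch; java.util.Arrays assumed to be imported):

    DefaultUDAFEvaluatorResolver resolver =
        new DefaultUDAFEvaluatorResolver(UDAFExampleSum.class);

    // Exact match on ExampleSumDoubleEvaluator.iterate(Double).
    Class<? extends UDAFEvaluator> c1 =
        resolver.getEvaluatorClass(Arrays.<Class<?>>asList(Double.class));

    // An Integer argument also lands on the Double evaluator, at the cost of
    // one implicit numeric conversion counted by getMethodInternal.
    Class<? extends UDAFEvaluator> c2 =
        resolver.getEvaluatorClass(Arrays.<Class<?>>asList(Integer.class));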
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.lang.reflect.Method; +import java.util.List; + +/** + * The UDF Method resolver interface. A user can plugin a resolver to their UDF by implementing the + * functions in this interface. Note that the resolver is stored in the UDF class as an instance + * variable. We did not use a static variable because many resolvers maintain the class of the + * enclosing UDF as state and are called from a base class e.g. UDFBaseCompare. This makes it very + * easy to write UDFs that want to do resolution similar to the comparison operators. Such UDFs + * just need to extend UDFBaseCompare and do not have to care about the UDFMethodResolver interface. + * Same is true for UDFs that want to do resolution similar to that done by the numeric operators. + * Such UDFs simply have to extend UDFBaseNumericOp class. For the default resolution the UDF + * implementation simply needs to extend the UDF class. + */ +public interface UDFMethodResolver { + + /** + * Gets the evaluate method for the UDF given the parameter types. + * + * @param argClasses The list of the argument types that need to matched with the evaluate + * function signature. + */ + public Method getEvalMethod(List> argClasses) + throws AmbiguousMethodException; +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java (revision 0) @@ -0,0 +1,61 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.lang.reflect.Method; +import java.util.List; + +/** + * The default UDF Method resolver. This resolver is used for resolving the UDF method that is to be + * used for evaluation given the list of the argument types. The getEvalMethod goes through all the + * evaluate methods and returns the one that matches the argument signature or is the closest match. + * Closest match is defined as the one that requires the least number of arguments to be converted. + * In case more than one matches are found, the method throws an ambiguous method exception. 
+ */ +public class DefaultUDFMethodResolver implements UDFMethodResolver { + + /** + * The class of the UDF. + */ + private Class udfClass; + + /** + * Constructor. + * This constructor sets the resolver to be used for comparison operators. + * See {@link UDFMethodResolver} + */ + public DefaultUDFMethodResolver(Class udfClass) { + this.udfClass = udfClass; + } + + /** + * Gets the evaluate method for the UDF given the parameter types. + * + * @param argClasses The list of the argument types that need to matched with the evaluate + * function signature. + */ + public Method getEvalMethod(List> argClasses) + throws AmbiguousMethodException { + Method m = FunctionRegistry.getMethodInternal(udfClass, "evaluate", false, argClasses); + if (m == null) { + throw new AmbiguousMethodException(udfClass, argClasses); + } + return m; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (revision 0) @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.util.List; + +/** + * Exception thrown by the UDF and UDAF method resolvers in case a unique method is not found. + * + */ +public class AmbiguousMethodException extends Exception { + + /** + * + */ + private static final long serialVersionUID = 1L; + + /** + * The UDF or UDAF class that has the ambiguity. + */ + Class funcClass; + + /** + * The list of parameter types. + */ + List> argClasses; + + /** + * Constructor. + * + * @param funcClass The UDF or UDAF class. + * @param argClasses The list of argument types that lead to an ambiguity. 
+ */ + AmbiguousMethodException(Class funcClass, List> argClasses) { + this.funcClass = funcClass; + this.argClasses = argClasses; + } + + Class getFunctionClass() { + return funcClass; + } + + List> getArgTypeList() { + return argClasses; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy) @@ -49,12 +49,6 @@ OperatorType.PREFIX, false); registerUDF("concat", UDFConcat.class, OperatorType.PREFIX, false); registerUDF("substr", UDFSubstr.class, OperatorType.PREFIX, false); - registerUDF("str_eq", UDFStrEq.class, OperatorType.PREFIX, false); - registerUDF("str_ne", UDFStrNe.class, OperatorType.PREFIX, false); - registerUDF("str_gt", UDFStrGt.class, OperatorType.PREFIX, false); - registerUDF("str_lt", UDFStrLt.class, OperatorType.PREFIX, false); - registerUDF("str_ge", UDFStrGe.class, OperatorType.PREFIX, false); - registerUDF("str_le", UDFStrLe.class, OperatorType.PREFIX, false); registerUDF("size", UDFSize.class, OperatorType.PREFIX, false); @@ -150,15 +144,26 @@ public static FunctionInfo getInfo(Class fClass) { for(Map.Entry ent: mFunctions.entrySet()) { FunctionInfo val = ent.getValue(); - if (val.getUDFClass() == fClass || - val.getUDAFClass() == fClass) { + if (val.getUDFClass() == fClass) { return val; } + // Otherwise this is potentially an aggregate evaluator + if (val.getUDAFClass() == fClass) { + return val; + } + // Otherwise check if the aggregator is one of the classes within the UDAF + if (val.getUDAFClass() != null) { + for(Class c: val.getUDAFClass().getClasses()) { + if (c == fClass) { + return val; + } + } + } } - + return null; } - + public static void registerUDF(String functionName, Class UDFClass, FunctionInfo.OperatorType opt, boolean isOperator) { if (UDF.class.isAssignableFrom(UDFClass)) { @@ -254,65 +259,65 @@ /** * Get the UDF method for the name and argumentClasses. * @param name the name of the UDF - * @param argumentClasses - * @param exact if true, we don't allow implicit type conversions. + * @param argumentClasses * @return */ - public static Method getUDFMethod(String name, boolean exact, List> argumentClasses) { + public static Method getUDFMethod(String name, List> argumentClasses) { Class udf = getUDFClass(name); if (udf == null) return null; - return getMethodInternal(udf, "evaluate", exact, argumentClasses); + Method udfMethod = null; + try { + udfMethod = udf.newInstance().getResolver().getEvalMethod(argumentClasses); + } + catch (AmbiguousMethodException e) { + } + catch (Exception e) { + throw new RuntimeException("getUDFMethod exception: " + e.getMessage()); + } + return udfMethod; } /** + * Get the UDAF evaluator for the name and argumentClasses. + * @param name the name of the UDAF + * @param argumentClasses + * @return + */ + public static Class getUDAFEvaluator(String name, List> argumentClasses) { + Class udf = getUDAF(name); + if (udf == null) return null; + + Class evalClass = null; + try { + evalClass = udf.newInstance().getResolver().getEvaluatorClass(argumentClasses); + } + catch (AmbiguousMethodException e) { + } + catch (Exception e) { + throw new RuntimeException("getUADFEvaluator exception: " + e.getMessage()); + } + return evalClass; + } + + /** * This method is shared between UDFRegistry and UDAFRegistry. 
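The net effect of the two new entry points above is that lookups go through a freshly instantiated function's resolver instead of raw reflection over the class. A usage sketch; concat's registration appears earlier in this file, while sum's UDAF registration is assumed:

    // Resolves UDFConcat.evaluate(String, String) via the default resolver.
    Method concatMethod = FunctionRegistry.getUDFMethod("concat",
        Arrays.<Class<?>>asList(String.class, String.class));

    // Resolves the evaluator class nested inside the registered sum UDAF.
    Class<? extends UDAFEvaluator> sumEvaluator =
        FunctionRegistry.getUDAFEvaluator("sum",
            Arrays.<Class<?>>asList(Double.class));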
* methodName will be "evaluate" for UDFRegistry, and "aggregate"/"evaluate"/"evaluatePartial" for UDAFRegistry. */ public static Method getMethodInternal(Class udfClass, String methodName, boolean exact, List> argumentClasses) { - int leastImplicitConversions = Integer.MAX_VALUE; - Method udfMethod = null; + ArrayList mlist = new ArrayList(); + for(Method m: Arrays.asList(udfClass.getMethods())) { if (m.getName().equals(methodName)) { - - Class[] argumentTypeInfos = m.getParameterTypes(); - - boolean match = (argumentTypeInfos.length == argumentClasses.size()); - int implicitConversions = 0; - - for(int i=0; i accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]); - if (accepted.isAssignableFrom(argumentClasses.get(i))) { - // do nothing if match - } else if (!exact && implicitConvertable(argumentClasses.get(i), accepted)) { - implicitConversions ++; - } else { - match = false; - } - } - - if (match) { - // Always choose the function with least implicit conversions. - if (implicitConversions < leastImplicitConversions) { - udfMethod = m; - leastImplicitConversions = implicitConversions; - // Found an exact match - if (leastImplicitConversions == 0) break; - } else if (implicitConversions == leastImplicitConversions){ - // Ambiguous call: two methods with the same number of implicit conversions - udfMethod = null; - } else { - // do nothing if implicitConversions > leastImplicitConversions - } - } + mlist.add(m); } } - return udfMethod; + + return getMethodInternal(mlist, exact, argumentClasses); } - public static Method getUDFMethod(String name, boolean exact, Class ... argumentClasses) { - return getUDFMethod(name, exact, Arrays.asList(argumentClasses)); + public static Method getUDFMethod(String name, Class ... argumentClasses) { + return getUDFMethod(name, Arrays.asList(argumentClasses)); } public static void registerUDAF(String functionName, Class UDAFClass) { @@ -345,7 +350,7 @@ Class udaf = getUDAF(name); if (udaf == null) return null; - return FunctionRegistry.getMethodInternal(udaf, "aggregate", false, + return FunctionRegistry.getMethodInternal(udaf, "iterate", false, argumentClasses); } @@ -363,7 +368,7 @@ return null; return FunctionRegistry.getMethodInternal(udaf, (mode == groupByDesc.Mode.COMPLETE || mode == groupByDesc.Mode.FINAL) - ? "evaluate" : "evaluatePartial", true, + ? "terminate" : "terminatePartial", true, new ArrayList>() ); } @@ -407,4 +412,53 @@ } return o; } + + /** + * Gets the closest matching method corresponding to the argument list from a list of methods. + * + * @param mlist The list of methods to inspect. + * @param exact Boolean to indicate whether this is an exact match or not. + * @param argumentClasses The classes for the argument. + * @return The matching method. + */ + public static Method getMethodInternal(ArrayList mlist, boolean exact, + List> argumentClasses) { + int leastImplicitConversions = Integer.MAX_VALUE; + Method udfMethod = null; + + for(Method m: mlist) { + Class[] argumentTypeInfos = m.getParameterTypes(); + + boolean match = (argumentTypeInfos.length == argumentClasses.size()); + int implicitConversions = 0; + + for(int i=0; i accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]); + if (accepted.isAssignableFrom(argumentClasses.get(i))) { + // do nothing if match + } else if (!exact && implicitConvertable(argumentClasses.get(i), accepted)) { + implicitConversions ++; + } else { + match = false; + } + } + + if (match) { + // Always choose the function with least implicit conversions. 
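+ // For example, given candidates evaluate(Integer, Integer) and + // evaluate(Double, Double) with argument classes (Integer, Integer), + // the Integer form matches with zero implicit conversions and wins + // outright; two candidates tied at the lowest conversion count reset + // udfMethod to null, which the resolvers surface as an ambiguous call.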
+ if (implicitConversions < leastImplicitConversions) { + udfMethod = m; + leastImplicitConversions = implicitConversions; + // Found an exact match + if (leastImplicitConversions == 0) break; + } else if (implicitConversions == leastImplicitConversions){ + // Ambiguous call: two methods with the same number of implicit conversions + udfMethod = null; + } else { + // do nothing if implicitConversions > leastImplicitConversions + } + } + } + return udfMethod; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java (working copy) @@ -17,9 +17,6 @@ */ package org.apache.hadoop.hive.ql.exec; - - -import org.apache.hadoop.hive.ql.metadata.HiveException; //import org.apache.hadoop.hive.serde.ReflectionSerDe; /** @@ -60,12 +57,40 @@ * public boolean aggregatePartial(String partial); * */ -public abstract class UDAF { +public class UDAF { - public UDAF() { } + /** + * The resolver used for method resolution. + */ + UDAFEvaluatorResolver rslv; + + /** + * The default constructor. + */ + public UDAF() { + rslv = new DefaultUDAFEvaluatorResolver(this.getClass()); + } - /** Initialize the aggregation object. - * The class should reset the status of the aggregation if aggregate() was called before. + /** + * The constructor with a particular type of resolver. */ - public abstract void init(); + public UDAF(UDAFEvaluatorResolver rslv) { + this.rslv = rslv; + } + + /** + * Sets the resolver + * + * @param The method resolver to use for method resolution. + */ + public void setResolver(UDAFEvaluatorResolver rslv) { + this.rslv = rslv; + } + + /** + * Gets the resolver. + */ + public UDAFEvaluatorResolver getResolver() { + return rslv; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java (revision 0) @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; + +/** + * The class implements the method resolution for operators like (+, -, *, /, %, |, &, ^). The + * resolution logic is as follows: + * 1. The resolver first tries to find an exact parameter match. + * 2. If 1 fails then it returns the evaluate(Double, Double) method. 
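+ * For example, (Integer, Integer) arguments match an evaluate(Integer, Integer) + * signature when one exists, while mixed arguments such as (Integer, String) + * fall back to evaluate(Double, Double).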
+ */ +public class NumericOpMethodResolver implements UDFMethodResolver { + + /** + * The UDF class for which resolution is needed. + */ + Class<? extends UDF> udfClass; + + /** + * Constructor. + */ + public NumericOpMethodResolver(Class<? extends UDF> udfClass) { + this.udfClass = udfClass; + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.ql.exec.UDFMethodResolver#getEvalMethod(java.util.List) + */ + @Override + public Method getEvalMethod(List<Class<?>> argClasses) + throws AmbiguousMethodException { + assert(argClasses.size() == 2); + + List<Class<?>> pClasses = null; + if (argClasses.get(0) == Void.class || + argClasses.get(1) == Void.class) { + pClasses = new ArrayList<Class<?>>(); + pClasses.add(Double.class); + pClasses.add(Double.class); + } else if (argClasses.get(0) == String.class || + argClasses.get(1) == String.class) { + pClasses = new ArrayList<Class<?>>(); + pClasses.add(Double.class); + pClasses.add(Double.class); + } else if (argClasses.get(0) == argClasses.get(1)) { + pClasses = argClasses; + } else { + pClasses = new ArrayList<Class<?>>(); + pClasses.add(Double.class); + pClasses.add(Double.class); + } + + Method udfMethod = null; + + for(Method m: Arrays.asList(udfClass.getMethods())) { + if (m.getName().equals("evaluate")) { + + Class<?>[] argumentTypeInfos = m.getParameterTypes(); + + boolean match = (argumentTypeInfos.length == pClasses.size()); + + for(int i=0; i<pClasses.size() && match; i++) { + Class<?> accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]); + if (accepted != pClasses.get(i)) { + match = false; + } + } + + if (match) { + if (udfMethod != null) { + throw new AmbiguousMethodException(udfClass, argClasses); + } + else { + udfMethod = m; + } + } + } + } + return udfMethod; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (revision 0) @@ -0,0 +1,109 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; + +/** + * The class implements the method resolution for overloaded comparison operators. The + * resolution logic is as follows: + * 1. The resolver first tries to find an exact parameter match. + * 2. If 1 fails and any of the parameters is a date, it converts the other to the date. + * 3. If 1 and 2 fail then it returns the evaluate(Double, Double) method.
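+ * For example, (Date, String) arguments resolve to evaluate(Date, Date), + * while (Integer, String) arguments fall back to evaluate(Double, Double).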
+ */ +public class ComparisonOpMethodResolver implements UDFMethodResolver { + + /** + * The udfclass for which resolution is needed. + */ + private Class udfClass; + + /** + * Constuctor. + */ + public ComparisonOpMethodResolver(Class udfClass) { + this.udfClass = udfClass; + } + + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.ql.exec.UDFMethodResolver#getEvalMethod(java.util.List) + */ + @Override + public Method getEvalMethod(List> argClasses) + throws AmbiguousMethodException { + assert(argClasses.size() == 2); + + List> pClasses = null; + if (argClasses.get(0) == Void.class || + argClasses.get(1) == Void.class) { + pClasses = new ArrayList>(); + pClasses.add(Double.class); + pClasses.add(Double.class); + } + else if (argClasses.get(0) == argClasses.get(1)) { + pClasses = argClasses; + } + else if (argClasses.get(0) == java.sql.Date.class || + argClasses.get(1) == java.sql.Date.class) { + pClasses = new ArrayList>(); + pClasses.add(java.sql.Date.class); + pClasses.add(java.sql.Date.class); + } + else { + pClasses = new ArrayList>(); + pClasses.add(Double.class); + pClasses.add(Double.class); + } + + Method udfMethod = null; + + for(Method m: Arrays.asList(udfClass.getMethods())) { + if (m.getName().equals("evaluate")) { + + Class[] argumentTypeInfos = m.getParameterTypes(); + + boolean match = (argumentTypeInfos.length == pClasses.size()); + + for(int i=0; i accepted = ObjectInspectorUtils.generalizePrimitive(argumentTypeInfos[i]); + if (accepted != pClasses.get(i)) { + match = false; + } + } + + if (match) { + if (udfMethod != null) { + throw new AmbiguousMethodException(udfClass, argClasses); + } + else { + udfMethod = m; + } + } + } + } + return udfMethod; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (working copy) @@ -24,8 +24,6 @@ import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory; -import org.apache.hadoop.hive.ql.parse.RowResolver; -import org.apache.hadoop.hive.ql.exec.ColumnInfo; public class exprNodeColumnDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; Index: ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (working copy) @@ -22,8 +22,6 @@ import java.util.List; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; -import org.apache.hadoop.hive.ql.parse.RowResolver; public class exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; Index: ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java (working copy) @@ -23,13 +23,11 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; import 
org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; +import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.parse.RowResolver; /** * The reason that we have to store UDFClass as well as UDFMethod is because @@ -39,12 +37,13 @@ public class exprNodeFuncDesc extends exprNodeDesc implements Serializable { private static final long serialVersionUID = 1L; - private Class UDFClass; + private Class UDFClass; private Method UDFMethod; private ArrayList children; public exprNodeFuncDesc() {} - public exprNodeFuncDesc(TypeInfo typeInfo, Class UDFClass, Method UDFMethod, ArrayList children) { + public exprNodeFuncDesc(TypeInfo typeInfo, Class UDFClass, + Method UDFMethod, ArrayList children) { super(typeInfo); assert(UDFClass != null); this.UDFClass = UDFClass; @@ -53,10 +52,11 @@ this.children = children; } - public Class getUDFClass() { + public Class getUDFClass() { return UDFClass; } - public void setUDFClass(Class UDFClass) { + + public void setUDFClass(Class UDFClass) { this.UDFClass = UDFClass; } public Method getUDFMethod() { Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (working copy) @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory; import org.apache.hadoop.hive.ql.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; @@ -222,7 +223,7 @@ } else { // numPartitionFields = -1 means random partitioning partitionCols = new ArrayList(1); - partitionCols.add(SemanticAnalyzer.getFuncExprNodeDesc("rand")); + partitionCols.add(TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("rand")); } StringBuilder order = new StringBuilder(); Index: ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java (working copy) @@ -21,25 +21,26 @@ import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDAF; +import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; public class aggregationDesc implements java.io.Serializable { private static final long serialVersionUID = 1L; - private Class aggregationClass; + private Class aggregationClass; private java.util.ArrayList parameters; private boolean distinct; public aggregationDesc() {} public aggregationDesc( - final Class aggregationClass, + final Class aggregationClass, final java.util.ArrayList parameters, final boolean distinct) { this.aggregationClass = aggregationClass; this.parameters = parameters; this.distinct = distinct; } - public Class getAggregationClass() { + public Class getAggregationClass() { return this.aggregationClass; } - public void setAggregationClass(final Class aggregationClass) { + public void setAggregationClass(final Class aggregationClass) { this.aggregationClass = aggregationClass; } public 
java.util.ArrayList getParameters() { Index: ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/tools/LineageInfo.java (working copy) @@ -30,8 +30,8 @@ import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.GraphWalker; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; -import org.apache.hadoop.hive.ql.lib.GraphWalker; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.HiveParser; @@ -76,7 +76,7 @@ /** * Implements the process method for the NodeProcessor interface. */ - public void process(Node nd, NodeProcessorCtx procCtx) + public Object process(Node nd, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { ASTNode pt = (ASTNode)nd; @@ -91,7 +91,7 @@ inputTableList.add(table_name); break; } - + return null; } /** @@ -128,7 +128,7 @@ // Create a list of topop nodes ArrayList topNodes = new ArrayList(); topNodes.add(tree); - ogw.startWalking(topNodes); + ogw.startWalking(topNodes, null); } public static void main(String[] args) throws IOException, ParseException, Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java (working copy) @@ -52,7 +52,7 @@ * @param ndStack the operators encountered so far * @throws SemanticException */ - public void dispatch(Node nd, Stack ndStack) + public Object dispatch(Node nd, Stack ndStack, Object... nodeOutputs) throws SemanticException { // find the firing rule @@ -75,8 +75,11 @@ proc = procRules.get(rule); // Do nothing in case proc is null - if (proc != null) + if (proc != null) { // Call the process function - proc.process(nd, procCtx); + return proc.process(nd, procCtx, nodeOutputs); + } + else + return null; } } Index: ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessorCtx.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessorCtx.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessorCtx.java (working copy) @@ -21,5 +21,5 @@ /** * Operator Processor Context */ -public abstract class NodeProcessorCtx { +public interface NodeProcessorCtx { } Index: ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java (working copy) @@ -30,8 +30,10 @@ * generic process for all ops that don't have specific implementations * @param nd operator to process * @param procCtx operator processor context + * @param nodeOutputs A variable argument list of outputs from other nodes in the walk + * @return Object to be returned by the process call * @throws SemanticException */ - public void process(Node nd, NodeProcessorCtx procCtx) + public Object process(Node nd, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException; } Index: ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java (working copy) @@ -29,11 +29,14 @@ public interface Dispatcher { /** - * dispatcher function - * @param nd operator to process - * @param Stack operator stack to process + * Dispatcher function. + * @param nd operator to process. + * @param Stack operator stack to process. + * @param nodeOutputs The argument list of outputs from processing other nodes that are + * passed to this dispatcher from the walker. + * @return Object The return object from the processing call. * @throws SemanticException */ - public abstract void dispatch(Node nd, Stack stack) + public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) throws SemanticException; } Index: ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java (working copy) @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.lib; import java.util.Collection; +import java.util.HashMap; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -31,9 +32,11 @@ * starting point for walking. * * @param startNodes list of starting operators + * @param nodeOutput If this parameter is not null, the call to the function returns the + * map from node to objects returned by the processors. * @throws SemanticException */ - public void startWalking(Collection startNodes) + public void startWalking(Collection startNodes, HashMap nodeOutput) throws SemanticException; } \ No newline at end of file Index: ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (working copy) @@ -20,14 +20,12 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.Stack; -import org.apache.hadoop.hive.ql.exec.FileSinkOperator; -import org.apache.hadoop.hive.ql.exec.ScriptOperator; -import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.parse.SemanticException; /** @@ -39,7 +37,7 @@ protected Stack opStack; private List toWalk = new ArrayList(); - private Set dispatchedList = new HashSet(); + private HashMap retMap = new HashMap(); private Dispatcher dispatcher; /** @@ -63,7 +61,7 @@ * @return the doneList */ public Set getDispatchedList() { - return dispatchedList; + return retMap.keySet(); } /** @@ -73,18 +71,32 @@ * @throws SemanticException */ public void dispatch(Node nd, Stack ndStack) throws SemanticException { - this.dispatcher.dispatch(nd, ndStack); - this.dispatchedList.add(nd); + Object[] nodeOutputs = null; + if (nd.getChildren() != null) { + nodeOutputs = new Object[nd.getChildren().size()]; + int i = 0; + for(Node child: nd.getChildren()) { + nodeOutputs[i++] = retMap.get(child); + } + } + + Object retVal = dispatcher.dispatch(nd, ndStack, nodeOutputs); + retMap.put(nd, retVal); } /** * starting point for walking * @throws SemanticException */ - public void 
startWalking(Collection startNodes) throws SemanticException { + public void startWalking(Collection startNodes, HashMap nodeOutput) throws SemanticException { toWalk.addAll(startNodes); - while(toWalk.size() > 0) - walk(toWalk.remove(0)); + while(toWalk.size() > 0) { + Node nd = toWalk.remove(0); + walk(nd); + if (nodeOutput != null) { + nodeOutput.put(nd, retMap.get(nd)); + } + } } /** Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (working copy) @@ -154,7 +154,7 @@ } // Create function desc - desc = SemanticAnalyzer.getXpathOrFuncExprNodeDesc(expr, isFunction, children); + desc = TypeCheckProcFactory.DefaultExprProcessor.getXpathOrFuncExprNodeDesc(expr, isFunction, children); if (desc instanceof exprNodeFuncDesc && ( ((exprNodeFuncDesc)desc).getUDFMethod().getDeclaringClass().equals(UDFOPAnd.class) @@ -245,7 +245,7 @@ if (this.prunerExpr == null) this.prunerExpr = desc; else - this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("OR", this.prunerExpr, desc); + this.prunerExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("OR", this.prunerExpr, desc); } } @@ -264,7 +264,7 @@ if (this.prunerExpr == null) this.prunerExpr = desc; else - this.prunerExpr = SemanticAnalyzer.getFuncExprNodeDesc("AND", this.prunerExpr, desc); + this.prunerExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("AND", this.prunerExpr, desc); } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (working copy) @@ -70,14 +70,15 @@ return ret.toString(); } - public void process(Node nd, NodeProcessorCtx ctx) throws SemanticException { + public Object process(Node nd, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException { Operator op = (Operator)nd; if (opMap.get(op) == null) { opMap.put(op, curNum++); } out.println("[" + opMap.get(op) + "] " + op.getClass().getName() + " =p=> " + getParents(op) + " =c=> " + getChildren(op)); if(op.getConf() == null) { - return; + return null; } + return null; } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 0) @@ -0,0 +1,628 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
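Callers that want per-node results now hand startWalking a map; passing null keeps the old fire-and-forget behavior. A sketch of the calling convention, reusing the illustrative SubtreeSizeProcessor from earlier and assuming DefaultRuleDispatcher's (default processor, rule map, context) constructor; rootNode is a hypothetical starting node:

    Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
    Dispatcher disp = new DefaultRuleDispatcher(new SubtreeSizeProcessor(), rules, null);
    GraphWalker ogw = new DefaultGraphWalker(disp);

    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.add(rootNode);

    HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
    ogw.startWalking(topNodes, nodeOutput); // or pass null if outputs are not needed
    Object sizeOfRoot = nodeOutput.get(rootNode);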
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.parse; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.ql.exec.ColumnInfo; +import org.apache.hadoop.hive.ql.exec.FunctionRegistry; +import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.lib.NodeProcessor; +import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; +import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc; +import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc; +import org.apache.hadoop.hive.ql.typeinfo.TypeInfo; +import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.ql.udf.UDFOPPositive; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; + +/** + * The Factory for creating typecheck processors. The typecheck processors are used to + * process the syntax trees for expressions and convert them into expression Node + * Descriptor trees. They also introduce the correct conversion functions to do proper + * implicit conversion. + */ +public class TypeCheckProcFactory { + + /** + * Function to do groupby subexpression elimination. This is called by all the processors initially. + * As an example, consider the query + * select a+b, count(1) from T group by a+b; + * Then a+b is already precomputed in the group by operator's key, so we substitute a+b in the select + * list with the internal column name of the a+b expression that appears in the input row resolver. + * + * @param nd The node that is being inspected. + * @param procCtx The processor context. + * + * @return exprNodeColumnDesc. + */ + public static exprNodeDesc processGByExpr(Node nd, Object procCtx) + throws SemanticException { + // We recursively create the exprNodeDesc. Base cases: when we encounter + // a column ref, we convert that into an exprNodeColumnDesc; when we encounter + // a constant, we convert that into an exprNodeConstantDesc. For others we just + // build the exprNodeFuncDesc with recursively built children. + ASTNode expr = (ASTNode)nd; + TypeCheckCtx ctx = (TypeCheckCtx) procCtx; + RowResolver input = ctx.getInputRR(); + exprNodeDesc desc = null; + + // If the current subExpression is pre-calculated, as in Group-By etc. + ColumnInfo colInfo = input.get("", expr.toStringTree()); + if (colInfo != null) { + desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + return desc; + } + return desc; + } + + /** + * Processor for processing NULL expression. + */ + public static class NullExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, NodeProcessorCtx procCtx, + Object...
nodeOutputs) throws SemanticException { + + exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + return new exprNodeNullDesc(); + } + + } + + /** + * Factory method to get NullExprProcessor. + * @return NullExprProcessor. + */ + public static NullExprProcessor getNullExprProcessor() { + return new NullExprProcessor(); + } + + /** + * Processor for processing numeric constants. + */ + public static class NumExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + Number v = null; + ASTNode expr = (ASTNode)nd; + // The expression can be any one of Double, Long and Integer. We + // try to parse the expression in that order to ensure that the + // most specific type is used for conversion. + try { + v = Double.valueOf(expr.getText()); + v = Long.valueOf(expr.getText()); + v = Integer.valueOf(expr.getText()); + } catch (NumberFormatException e) { + // do nothing here, we will throw an exception in the following block + } + if (v == null) { + throw new SemanticException(ErrorMsg.INVALID_NUMERICAL_CONSTANT.getMsg(expr)); + } + return new exprNodeConstantDesc(v); + } + + } + + /** + * Factory method to get NumExprProcessor. + * @return NumExprProcessor. + */ + public static NumExprProcessor getNumExprProcessor() { + return new NumExprProcessor(); + } + + /** + * Processor for processing string constants. + */ + public static class StrExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + ASTNode expr = (ASTNode)nd; + String str = null; + + switch (expr.getToken().getType()) { + case HiveParser.Identifier: + str = BaseSemanticAnalyzer.unescapeIdentifier(expr.getText()); + break; + case HiveParser.StringLiteral: + str = BaseSemanticAnalyzer.unescapeSQLString(expr.getText()); + break; + case HiveParser.TOK_CHARSETLITERAL: + str = BaseSemanticAnalyzer.charSetString(expr.getChild(0).getText(), expr.getChild(1).getText()); + break; + default: + assert false; + } + return new exprNodeConstantDesc(String.class, str); + } + + } + + /** + * Factory method to get StrExprProcessor. + * @return StrExprProcessor. + */ + public static StrExprProcessor getStrExprProcessor() { + return new StrExprProcessor(); + } + + /** + * Processor for boolean constants. + */ + public static class BoolExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + ASTNode expr = (ASTNode)nd; + Boolean bool = null; + + switch (expr.getToken().getType()) { + case HiveParser.KW_TRUE: + bool = Boolean.TRUE; + break; + case HiveParser.KW_FALSE: + bool = Boolean.FALSE; + break; + default: + assert false; + } + return new exprNodeConstantDesc(Boolean.class, bool); + } + + } + + /** + * Factory method to get BoolExprProcessor. + * @return BoolExprProcessor. 
+ */ + public static BoolExprProcessor getBoolExprProcessor() { + return new BoolExprProcessor(); + } + + /** + * Processor for table columns + */ + public static class ColumnExprProcessor implements NodeProcessor { + + @Override + public Object process(Node nd, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + ASTNode expr = (ASTNode)nd; + TypeCheckCtx ctx = (TypeCheckCtx)procCtx; + RowResolver input = ctx.getInputRR(); + + if(expr.getType() != HiveParser.TOK_COLREF) { + ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr)); + return null; + } + + String tabAlias = null; + String colName = null; + + if (expr.getChildCount() != 1) { + tabAlias = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText()); + colName = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(1).getText()); + } + else { + colName = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText()); + } + + if (colName == null) { + ctx.setError(ErrorMsg.INVALID_XPATH.getMsg(expr)); + return null; + } + + ColumnInfo colInfo = input.get(tabAlias, colName); + + if (colInfo == null && input.getIsExprResolver()) { + ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr)); + return null; + } + else if (tabAlias != null && !input.hasTableAlias(tabAlias)) { + ctx.setError(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr.getChild(0))); + return null; + } else if (colInfo == null) { + ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(tabAlias == null? expr.getChild(0) : expr.getChild(1))); + return null; + } + + return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); + } + + } + + /** + * Factory method to get ColumnExprProcessor. + * @return ColumnExprProcessor. + */ + public static ColumnExprProcessor getColumnExprProcessor() { + return new ColumnExprProcessor(); + } + + /** + * The default processor for typechecking. 
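+ * <p>
+ * A hedged sketch of the net effect (the conversion UDF name is a real Hive
+ * class, the rewriting shown is schematic): for a predicate over a string
+ * column such as key > 10, no ">" UDF method accepts (String, Integer)
+ * directly, so the processor resolves the ">" UDF and lets
+ * getFuncExprNodeDesc wrap both children in conversion calls:
+ * <pre>
+ *   (key > 10)  becomes  ">"(UDFToDouble(key), UDFToDouble(10))
+ * </pre>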
+ */ + public static class DefaultExprProcessor implements NodeProcessor { + + static HashMap<Integer, String> specialUnaryOperatorTextHashMap; + static HashMap<Integer, String> specialFunctionTextHashMap; + static HashMap<Integer, String> conversionFunctionTextHashMap; + static { + specialUnaryOperatorTextHashMap = new HashMap<Integer, String>(); + specialUnaryOperatorTextHashMap.put(HiveParser.PLUS, "positive"); + specialUnaryOperatorTextHashMap.put(HiveParser.MINUS, "negative"); + specialFunctionTextHashMap = new HashMap<Integer, String>(); + specialFunctionTextHashMap.put(HiveParser.TOK_ISNULL, "isnull"); + specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull"); + conversionFunctionTextHashMap = new HashMap<Integer, String>(); + conversionFunctionTextHashMap.put(HiveParser.TOK_BOOLEAN, Boolean.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_TINYINT, Byte.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_SMALLINT, Short.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_INT, Integer.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_BIGINT, Long.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_FLOAT, Float.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_DOUBLE, Double.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_STRING, String.class.getName()); + conversionFunctionTextHashMap.put(HiveParser.TOK_DATE, java.sql.Date.class.getName()); + } + + public static boolean isRedundantConversionFunction(ASTNode expr, boolean isFunction, ArrayList<exprNodeDesc> children) { + if (!isFunction) return false; + // children is always one less than the expr.getChildCount(), since the latter contains the function name. + assert(children.size() == expr.getChildCount() - 1); + // conversion functions take a single parameter + if (children.size() != 1) return false; + String funcText = conversionFunctionTextHashMap.get(((ASTNode)expr.getChild(0)).getType()); + // not a conversion function + if (funcText == null) return false; + // return true when the child type and the conversion target type are the same + return children.get(0).getTypeInfo().getPrimitiveClass().getName().equals(funcText); + } + + public static String getFunctionText(ASTNode expr, boolean isFunction) { + String funcText = null; + if (!isFunction) { + // For operator, the function name is the operator text, unless it's in our special dictionary + if (expr.getChildCount() == 1) { + funcText = specialUnaryOperatorTextHashMap.get(expr.getType()); + } + if (funcText == null) { + funcText = expr.getText(); + } + } else { + // For TOK_FUNCTION, the function name is stored in the first child, unless it's in our + // special dictionary. + assert(expr.getChildCount() >= 1); + int funcType = ((ASTNode)expr.getChild(0)).getType(); + funcText = specialFunctionTextHashMap.get(funcType); + if (funcText == null) { + funcText = conversionFunctionTextHashMap.get(funcType); + } + if (funcText == null) { + funcText = ((ASTNode)expr.getChild(0)).getText(); + } + } + return funcText; + } + + + /** + * Get the exprNodeDesc for a function with the given name and children. + * @param name the function name + * @param children the argument expression descriptors + * @return the function expression node descriptor + */ + public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... children) { + return getFuncExprNodeDesc(name, Arrays.asList(children)); + } + + /** + * This function creates an ExprNodeDesc for a UDF function given the children (arguments). + * It will insert implicit type conversion functions if necessary. 
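+ *
+ * A hypothetical call (the descriptors colDesc and constDesc are assumed,
+ * not part of this patch):
+ * <pre>
+ *   // If the only matching ">" method takes (Double, Double), both
+ *   // arguments come back wrapped in the registered conversion UDF.
+ *   exprNodeDesc gt = getFuncExprNodeDesc(">", colDesc, constDesc);
+ * </pre>
+ * A null return means no UDF method matched the argument classes.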
+ * @throws SemanticException + */ + public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children) { + // Find the corresponding method + ArrayList<Class<?>> argumentClasses = new ArrayList<Class<?>>(children.size()); + for(int i=0; i<children.size(); i++) { + argumentClasses.add(children.get(i).getTypeInfo().getPrimitiveClass()); + } + Method udfMethod = FunctionRegistry.getUDFMethod(udfName, argumentClasses); + if (udfMethod == null) { + return null; + } + + ArrayList<exprNodeDesc> ch = new ArrayList<exprNodeDesc>(); + Class<?>[] pTypes = udfMethod.getParameterTypes(); + + for (int i = 0; i < children.size(); i++) + { + exprNodeDesc desc = children.get(i); + Class<?> pType = ObjectInspectorUtils.generalizePrimitive(pTypes[i]); + if (desc instanceof exprNodeNullDesc) { + exprNodeConstantDesc newCh = new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(pType), null); + ch.add(newCh); + } else if (pType.isAssignableFrom(argumentClasses.get(i))) { + // no type conversion needed + ch.add(desc); + } else { + // must be implicit type conversion + Class<?> from = argumentClasses.get(i); + Class<?> to = pType; + assert(FunctionRegistry.implicitConvertable(from, to)); + Method m = FunctionRegistry.getUDFMethod(to.getName(), from); + assert(m != null); + Class<? extends UDF> c = FunctionRegistry.getUDFClass(to.getName()); + assert(c != null); + + // get the conversion method + ArrayList<exprNodeDesc> conversionArg = new ArrayList<exprNodeDesc>(1); + conversionArg.add(desc); + ch.add(new exprNodeFuncDesc( + TypeInfoFactory.getPrimitiveTypeInfo(pType), + c, m, conversionArg)); + } + } + + exprNodeFuncDesc desc = new exprNodeFuncDesc( + TypeInfoFactory.getPrimitiveTypeInfo(udfMethod.getReturnType()), + FunctionRegistry.getUDFClass(udfName), + udfMethod, ch); + return desc; + } + + static exprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr, boolean isFunction, + ArrayList<exprNodeDesc> children) + throws SemanticException { + // return the child directly if the conversion is redundant. + if (isRedundantConversionFunction(expr, isFunction, children)) { + assert(children.size() == 1); + assert(children.get(0) != null); + return children.get(0); + } + String funcText = getFunctionText(expr, isFunction); + exprNodeDesc desc; + if (funcText.equals(".")) { + // "." 
: FIELD Expression + assert(children.size() == 2); + // Only allow constant field name for now + assert(children.get(1) instanceof exprNodeConstantDesc); + exprNodeDesc object = children.get(0); + exprNodeConstantDesc fieldName = (exprNodeConstantDesc)children.get(1); + assert(fieldName.getValue() instanceof String); + + // Calculate result TypeInfo + String fieldNameString = (String)fieldName.getValue(); + TypeInfo objectTypeInfo = object.getTypeInfo(); + + // Allow accessing a field of list element structs directly from a list + boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST); + if (isList) { + objectTypeInfo = objectTypeInfo.getListElementTypeInfo(); + } + if (objectTypeInfo.getCategory() != Category.STRUCT) { + throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr)); + } + TypeInfo t = objectTypeInfo.getStructFieldTypeInfo(fieldNameString); + if (isList) { + t = TypeInfoFactory.getListTypeInfo(t); + } + + desc = new exprNodeFieldDesc(t, children.get(0), fieldNameString, isList); + + } else if (funcText.equals("[")){ + // "[]" : LSQUARE/INDEX Expression + assert(children.size() == 2); + + // Check whether this is a list or a map + TypeInfo myt = children.get(0).getTypeInfo(); + + if (myt.getCategory() == Category.LIST) { + // Only allow constant integer index for now + if (!(children.get(1) instanceof exprNodeConstantDesc) + || !(((exprNodeConstantDesc)children.get(1)).getValue() instanceof Integer)) { + throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr)); + } + + // Calculate TypeInfo + TypeInfo t = myt.getListElementTypeInfo(); + desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); + } + else if (myt.getCategory() == Category.MAP) { + // Only allow constant indexes for now + if (!(children.get(1) instanceof exprNodeConstantDesc)) { + throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr)); + } + if (!(((exprNodeConstantDesc)children.get(1)).getValue().getClass() == + myt.getMapKeyTypeInfo().getPrimitiveClass())) { + throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr)); + } + // Calculate TypeInfo + TypeInfo t = myt.getMapValueTypeInfo(); + + desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); + } + else { + throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr, + myt.getTypeName())); + } + } else { + // other operators or functions + Class<? extends UDF> udf = FunctionRegistry.getUDFClass(funcText); + if (udf == null) { + if (isFunction) + throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr.getChild(0))); + else + throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr)); + } + + desc = getFuncExprNodeDesc(funcText, children); + if (desc == null) { + ArrayList> argumentClasses = new ArrayList>(children.size()); + for(int i=0; i convertedParameters; - Method aggregateMethod; - Method evaluateMethod; + Class retType; + Class evalClass; } /** * Returns the UDAFInfo struct for the aggregation * @param aggName The name of the UDAF. * @param mode The mode of the aggregation. This affects the evaluate method. - * @param aggClasses The classes of the parameters to the UDAF. * @param aggParameters The actual exprNodeDesc of the parameters. * @param aggTree The ASTNode node of the UDAF in the query. * @return UDAFInfo * @throws SemanticException when the UDAF is not found or has problems. 
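+ * <p>
+ * A hedged sketch of the result (variable names hypothetical):
+ * <pre>
+ *   UDAFInfo info = getUDAFInfo("min", mode, aggParameters, aggTree);
+ *   // info.evalClass           e.g. UDAFMin.MinDoubleEvaluator
+ *   // info.convertedParameters parameters converted to iterate()'s types
+ *   // info.retType             return type of terminate() or
+ *   //                          terminatePartial(), depending on mode
+ * </pre>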
*/ - UDAFInfo getUDAFInfo(String aggName, groupByDesc.Mode mode, ArrayList> aggClasses, + UDAFInfo getUDAFInfo(String aggName, groupByDesc.Mode mode, ArrayList aggParameters, ASTNode aggTree) throws SemanticException { UDAFInfo r = new UDAFInfo(); - r.aggregateMethod = FunctionRegistry.getUDAFMethod(aggName, aggClasses); - if (null == r.aggregateMethod) { - String reason = "Looking for UDAF \"" + aggName + "\" with parameters " + aggClasses; + ArrayList> aggClasses = new ArrayList>(); + for(exprNodeDesc expr: aggParameters) { + aggClasses.add(expr.getTypeInfo().getPrimitiveClass()); + } + r.evalClass = FunctionRegistry.getUDAFEvaluator(aggName, aggClasses); + if (null == r.evalClass) { + String reason = "Looking for UDAF Evaluator\"" + aggName + "\" with parameters " + aggClasses; throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((ASTNode)aggTree.getChild(0), reason)); } + + Method aggregateMethod = null; + for(Method m: r.evalClass.getMethods()) { + if (m.getName().equalsIgnoreCase("iterate")) { + aggregateMethod = m; + } + } + + if (null == aggregateMethod) { + String reason = "Looking for UDAF Evaluator Iterator\"" + aggName + "\" with parameters " + aggClasses; + throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((ASTNode)aggTree.getChild(0), reason)); + } - r.convertedParameters = convertParameters(r.aggregateMethod, aggParameters); + r.convertedParameters = convertParameters(aggregateMethod, aggParameters); - r.evaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode); - if (r.evaluateMethod == null) { - String reason = "UDAF \"" + aggName + "\" does not have evaluate()/evaluatePartial() methods."; + Method evaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode); + String funcName = (mode == groupByDesc.Mode.COMPLETE || mode == groupByDesc.Mode.FINAL) ? 
"terminate" : "terminatePartial"; + for(Method m: r.evalClass.getMethods()) { + if (m.getName().equalsIgnoreCase(funcName)) { + evaluateMethod = m; + } + } + if (evaluateMethod == null) { + String reason = "UDAF \"" + aggName + "\" does not have terminate()/terminatePartial() methods."; throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)aggTree.getChild(0), reason)); } + r.retType = evaluateMethod.getReturnType(); return r; } @@ -1279,7 +1301,6 @@ Class aggClass = FunctionRegistry.getUDAF(aggName); assert (aggClass != null); ArrayList aggParameters = new ArrayList(); - ArrayList> aggClasses = new ArrayList>(); // 0 is the function name for (int i = 1; i < value.getChildCount(); i++) { String text = value.getChild(i).toStringTree(); @@ -1292,16 +1313,15 @@ String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName())); - aggClasses.add(paraExprInfo.getType().getPrimitiveClass()); } - UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, aggParameters, value); + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggParameters, value); - aggregations.add(new aggregationDesc(aggClass, udaf.convertedParameters, + aggregations.add(new aggregationDesc(udaf.evalClass, udaf.convertedParameters, value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - udaf.evaluateMethod.getReturnType())); + udaf.retType)); } return @@ -1352,12 +1372,9 @@ String aggName = value.getChild(0).getText(); Class aggClass = FunctionRegistry.getUDAF(aggName); assert (aggClass != null); - Method aggEvaluateMethod = null; - ArrayList aggParameters = null; + ArrayList aggParameters = new ArrayList(); if (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI) { - ArrayList> aggClasses = new ArrayList>(); - ArrayList params = new ArrayList(); // 0 is the function name for (int i = 1; i < value.getChildCount(); i++) { String text = value.getChild(i).toStringTree(); @@ -1369,17 +1386,11 @@ String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); - params.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName())); - aggClasses.add(paraExprInfo.getType().getPrimitiveClass()); + aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName())); } - UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, params, value); - aggParameters = udaf.convertedParameters; - aggEvaluateMethod = udaf.evaluateMethod; } else { - aggParameters = new ArrayList(); - aggEvaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode); String text = entry.getKey(); ColumnInfo paraExprInfo = groupByInputRowResolver.get("",text); if (paraExprInfo == null) { @@ -1390,10 +1401,12 @@ aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); } - aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggParameters, value); + aggregations.add(new aggregationDesc(udaf.evalClass, udaf.convertedParameters, + ((mode == groupByDesc.Mode.FINAL) ? 
false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(), - aggEvaluateMethod.getReturnType())); + udaf.retType)); } return putOpInsertMap( @@ -1423,7 +1436,7 @@ List grpByExprs = getGroupByForClause(parseInfo, dest); for (int i = 0; i < grpByExprs.size(); ++i) { ASTNode grpbyExpr = grpByExprs.get(i); - exprNodeDesc grpByExprNode = genExprNodeDesc(qb.getMetaData(), grpbyExpr, groupByInputRowResolver); + exprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, groupByInputRowResolver); groupByKeys.add(grpByExprNode); String field = (Integer.valueOf(i)).toString(); @@ -1440,7 +1453,7 @@ ASTNode parameter = (ASTNode) value.getChild(i); String text = parameter.toStringTree(); if (groupByOutputRowResolver.get("",text) == null) { - exprNodeDesc distExprNode = genExprNodeDesc(qb.getMetaData(), parameter, groupByInputRowResolver); + exprNodeDesc distExprNode = genExprNodeDesc(parameter, groupByInputRowResolver); groupByKeys.add(distExprNode); numDistn++; String field = (Integer.valueOf(grpByExprs.size() + numDistn -1)).toString(); @@ -1464,19 +1477,18 @@ // 0 is the function name for (int i = 1; i < value.getChildCount(); i++) { ASTNode paraExpr = (ASTNode)value.getChild(i); - exprNodeDesc paraExprNode = genExprNodeDesc(qb.getMetaData(), paraExpr, groupByInputRowResolver); + exprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, groupByInputRowResolver); aggParameters.add(paraExprNode); - aggClasses.add(paraExprNode.getTypeInfo().getPrimitiveClass()); } - UDAFInfo udaf = getUDAFInfo(aggName, mode, aggClasses, aggParameters, value); + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggParameters, value); - aggregations.add(new aggregationDesc(aggClass, udaf.convertedParameters, + aggregations.add(new aggregationDesc(udaf.evalClass, udaf.convertedParameters, value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)); groupByOutputRowResolver.put("",value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() -1).toString(), - udaf.evaluateMethod.getReturnType())); + udaf.retType)); } return putOpInsertMap( @@ -1506,7 +1518,7 @@ Class from = desc.getTypeInfo().getPrimitiveClass(); Class to = pType; assert(FunctionRegistry.implicitConvertable(from, to)); - Method conv = FunctionRegistry.getUDFMethod(to.getName(), true, from); + Method conv = FunctionRegistry.getUDFMethod(to.getName(), from); assert(conv != null); Class c = FunctionRegistry.getUDFClass(to.getName()); assert(c != null); @@ -1635,7 +1647,7 @@ List grpByExprs = getGroupByForClause(parseInfo, dest); for (int i = 0; i < grpByExprs.size(); ++i) { ASTNode grpbyExpr = grpByExprs.get(i); - reduceKeys.add(genExprNodeDesc(qb.getMetaData(), grpbyExpr, reduceSinkInputRowResolver)); + reduceKeys.add(genExprNodeDesc(grpbyExpr, reduceSinkInputRowResolver)); String text = grpbyExpr.toStringTree(); if (reduceSinkOutputRowResolver.get("", text) == null) { reduceSinkOutputRowResolver.put("", text, @@ -1654,7 +1666,7 @@ ASTNode parameter = (ASTNode) value.getChild(i); String text = parameter.toStringTree(); if (reduceSinkOutputRowResolver.get("",text) == null) { - reduceKeys.add(genExprNodeDesc(qb.getMetaData(), parameter, reduceSinkInputRowResolver)); + reduceKeys.add(genExprNodeDesc(parameter, reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." 
+ Integer.valueOf(reduceKeys.size() - 1).toString(), reduceKeys.get(reduceKeys.size()-1).getTypeInfo())); @@ -1673,7 +1685,7 @@ ASTNode parameter = (ASTNode) value.getChild(i); String text = parameter.toStringTree(); if (reduceSinkOutputRowResolver.get("",text) == null) { - reduceValues.add(genExprNodeDesc(qb.getMetaData(), parameter, reduceSinkInputRowResolver)); + reduceValues.add(genExprNodeDesc(parameter, reduceSinkInputRowResolver)); reduceSinkOutputRowResolver.put("", text, new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + Integer.valueOf(reduceValues.size() - 1).toString(), reduceValues.get(reduceValues.size()-1).getTypeInfo())); @@ -1783,12 +1795,9 @@ HashMap aggregationTrees = parseInfo .getAggregationExprsForClause(dest); for (Map.Entry entry : aggregationTrees.entrySet()) { - ASTNode value = entry.getValue(); - String aggName = value.getChild(0).getText(); - Class aggClass = FunctionRegistry.getUDAF(aggName); - Method aggEvaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode); - assert (aggClass != null); ArrayList aggParameters = new ArrayList(); + ArrayList> aggParamTypes = new ArrayList>(); + ASTNode value = entry.getValue(); String text = entry.getKey(); ColumnInfo paraExprInfo = groupByInputRowResolver2.get("",text); if (paraExprInfo == null) { @@ -1797,10 +1806,18 @@ String paraExpression = paraExprInfo.getInternalName(); assert(paraExpression != null); aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression)); - aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); + aggParamTypes.add(paraExprInfo.getType().getPrimitiveClass()); + + String aggName = value.getChild(0).getText(); + Class aggClass = FunctionRegistry.getUDAF(aggName); + assert (aggClass != null); + + UDAFInfo udaf = getUDAFInfo(aggName, mode, aggParameters, value); + aggregations.add(new aggregationDesc(udaf.evalClass, udaf.convertedParameters, + ((mode == groupByDesc.Mode.FINAL) ? 
false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI)))); groupByOutputRowResolver2.put("", value.toStringTree(), new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(), - aggEvaluateMethod.getReturnType())); + udaf.retType)); } return putOpInsertMap( @@ -2099,7 +2116,7 @@ // cannot convert to complex types column = null; } else { - column = getFuncExprNodeDesc(tableFieldTypeInfo.getPrimitiveClass().getName(), column); + column = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(tableFieldTypeInfo.getPrimitiveClass().getName(), column); } if (column == null) { String reason = "Cannot convert column " + i + " from " + rowFieldTypeInfo + " to " @@ -2183,7 +2200,7 @@ int ccount = partitionExprs.getChildCount(); for(int i=0; i exprs = joinTree.getExpressions().get(pos); for (int i = 0; i < exprs.size(); i++) { ASTNode expr = exprs.get(i); - reduceKeys.add(genExprNodeDesc(qb.getMetaData(), expr, inputRS)); + reduceKeys.add(genExprNodeDesc(expr, inputRS)); } // Walk over the input row resolver and copy in the output @@ -2403,7 +2420,7 @@ // Add implicit type conversion if necessary for(int i=0; i children = new ArrayList(expr.getChildCount() - childrenBegin); - for (int ci=childrenBegin; ci opRules = new LinkedHashMap(); + StringBuilder sb = new StringBuilder(); + Formatter fm = new Formatter(sb); + opRules.put(new RuleRegExp("R1", HiveParser.TOK_NULL + "%"), TypeCheckProcFactory.getNullExprProcessor()); + opRules.put(new RuleRegExp("R2", HiveParser.Number + "%"), TypeCheckProcFactory.getNumExprProcessor()); + opRules.put(new RuleRegExp("R3", HiveParser.Identifier + "%|" + + HiveParser.StringLiteral + "%|" + + HiveParser.TOK_CHARSETLITERAL + "%"), + TypeCheckProcFactory.getStrExprProcessor()); + opRules.put(new RuleRegExp("R4", HiveParser.KW_TRUE + "%|" + HiveParser.KW_FALSE + "%"), + TypeCheckProcFactory.getBoolExprProcessor()); + opRules.put(new RuleRegExp("R4", HiveParser.TOK_COLREF + "%"), TypeCheckProcFactory.getColumnExprProcessor()); - /** - * Generates expression node from a TOK_COLREF AST Node - * @param expr Antrl node - * @param input row resolver for this col reference - * @return exprNodeDesc or null if ASTNode is not a TOK_COLREF - * @throws SemanticException - */ - private exprNodeDesc genExprNodeDescFromColRef(ASTNode expr, RowResolver input) - throws SemanticException { - if(expr.getType() != HiveParser.TOK_COLREF) { - throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(expr)); + // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along + Dispatcher disp = new DefaultRuleDispatcher(TypeCheckProcFactory.getDefaultExprProcessor(), opRules, tcCtx); + GraphWalker ogw = new DefaultGraphWalker(disp); + + // Create a list of topop nodes + ArrayList topNodes = new ArrayList(); + topNodes.add(expr); + HashMap nodeOutputs = new HashMap(); + ogw.startWalking(topNodes, nodeOutputs); + exprNodeDesc desc = (exprNodeDesc)nodeOutputs.get(expr); + if (desc == null) { + throw new SemanticException(tcCtx.getError()); } - exprNodeDesc desc; - ColumnInfo colInfo; - String tabAlias = null; - String colName = null; - if (expr.getChildCount() != 1) { - tabAlias = unescapeIdentifier(expr.getChild(0).getText()); - colName = unescapeIdentifier(expr.getChild(1).getText()); - } - else { - colName = unescapeIdentifier(expr.getChild(0).getText()); - } - if (colName == null) { - throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr)); - } - - colInfo = input.get(tabAlias, colName); - - 
if (colInfo == null && input.getIsExprResolver()) { - throw new SemanticException(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr)); - } - else if (tabAlias != null && !input.hasTableAlias(tabAlias)) { - throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr.getChild(0))); - } else if (colInfo == null) { - throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(tabAlias == null? expr.getChild(0) : expr.getChild(1))); - } - - desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()); return desc; } - - static HashMap specialUnaryOperatorTextHashMap; - static HashMap specialFunctionTextHashMap; - static HashMap conversionFunctionTextHashMap; - static { - specialUnaryOperatorTextHashMap = new HashMap(); - specialUnaryOperatorTextHashMap.put(HiveParser.PLUS, "positive"); - specialUnaryOperatorTextHashMap.put(HiveParser.MINUS, "negative"); - specialFunctionTextHashMap = new HashMap(); - specialFunctionTextHashMap.put(HiveParser.TOK_ISNULL, "isnull"); - specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull"); - conversionFunctionTextHashMap = new HashMap(); - conversionFunctionTextHashMap.put(HiveParser.TOK_BOOLEAN, Boolean.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_TINYINT, Byte.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_SMALLINT, Short.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_INT, Integer.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_BIGINT, Long.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_FLOAT, Float.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_DOUBLE, Double.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_STRING, String.class.getName()); - conversionFunctionTextHashMap.put(HiveParser.TOK_DATE, java.sql.Date.class.getName()); - } - public static boolean isRedundantConversionFunction(ASTNode expr, boolean isFunction, ArrayList children) { - if (!isFunction) return false; - // children is always one less than the expr.getChildCount(), since the latter contains function name. - assert(children.size() == expr.getChildCount() - 1); - // conversion functions take a single parameter - if (children.size() != 1) return false; - String funcText = conversionFunctionTextHashMap.get(((ASTNode)expr.getChild(0)).getType()); - // not a conversion function - if (funcText == null) return false; - // return true when the child type and the conversion target type is the same - return children.get(0).getTypeInfo().getPrimitiveClass().getName().equals(funcText); - } - - public static String getFunctionText(ASTNode expr, boolean isFunction) { - String funcText = null; - if (!isFunction) { - // For operator, the function name is the operator text, unless it's in our special dictionary - if (expr.getChildCount() == 1) { - funcText = specialUnaryOperatorTextHashMap.get(expr.getType()); - } - if (funcText == null) { - funcText = expr.getText(); - } - } else { - // For TOK_FUNCTION, the function name is stored in the first child, unless it's in our - // special dictionary. 
- assert(expr.getChildCount() >= 1); - int funcType = ((ASTNode)expr.getChild(0)).getType(); - funcText = specialFunctionTextHashMap.get(funcType); - if (funcText == null) { - funcText = conversionFunctionTextHashMap.get(funcType); - } - if (funcText == null) { - funcText = ((ASTNode)expr.getChild(0)).getText(); - } - } - return funcText; - } - - static exprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr, boolean isFunction, - ArrayList children) - throws SemanticException { - // return the child directly if the conversion is redundant. - if (isRedundantConversionFunction(expr, isFunction, children)) { - assert(children.size() == 1); - assert(children.get(0) != null); - return children.get(0); - } - String funcText = getFunctionText(expr, isFunction); - exprNodeDesc desc; - if (funcText.equals(".")) { - // "." : FIELD Expression - assert(children.size() == 2); - // Only allow constant field name for now - assert(children.get(1) instanceof exprNodeConstantDesc); - exprNodeDesc object = children.get(0); - exprNodeConstantDesc fieldName = (exprNodeConstantDesc)children.get(1); - assert(fieldName.getValue() instanceof String); - - // Calculate result TypeInfo - String fieldNameString = (String)fieldName.getValue(); - TypeInfo objectTypeInfo = object.getTypeInfo(); - - // Allow accessing a field of list element structs directly from a list - boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST); - if (isList) { - objectTypeInfo = objectTypeInfo.getListElementTypeInfo(); - } - if (objectTypeInfo.getCategory() != Category.STRUCT) { - throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr)); - } - TypeInfo t = objectTypeInfo.getStructFieldTypeInfo(fieldNameString); - if (isList) { - t = TypeInfoFactory.getListTypeInfo(t); - } - - desc = new exprNodeFieldDesc(t, children.get(0), fieldNameString, isList); - - } else if (funcText.equals("[")){ - // "[]" : LSQUARE/INDEX Expression - assert(children.size() == 2); - - // Check whether this is a list or a map - TypeInfo myt = children.get(0).getTypeInfo(); - - if (myt.getCategory() == Category.LIST) { - // Only allow constant integer index for now - if (!(children.get(1) instanceof exprNodeConstantDesc) - || !(((exprNodeConstantDesc)children.get(1)).getValue() instanceof Integer)) { - throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr)); - } - - // Calculate TypeInfo - TypeInfo t = myt.getListElementTypeInfo(); - desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); - } - else if (myt.getCategory() == Category.MAP) { - // Only allow only constant indexes for now - if (!(children.get(1) instanceof exprNodeConstantDesc)) { - throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr)); - } - if (!(((exprNodeConstantDesc)children.get(1)).getValue().getClass() == - myt.getMapKeyTypeInfo().getPrimitiveClass())) { - throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr)); - } - // Calculate TypeInfo - TypeInfo t = myt.getMapValueTypeInfo(); - - desc = new exprNodeIndexDesc(t, children.get(0), children.get(1)); - } - else { - throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr, - myt.getTypeName())); - } - } else { - // other operators or functions - Class udf = FunctionRegistry.getUDFClass(funcText); - if (udf == null) { - if (isFunction) - throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr.getChild(0))); - else - throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr)); - } - - desc = 
getFuncExprNodeDesc(funcText, children); - if (desc == null) { - ArrayList> argumentClasses = new ArrayList>(children.size()); - for(int i=0; i= a.length()) return ""; - if (start + len > a.length()) len = a.length() - start; - return a.substring(start, start + len); - } - - public String evaluate(String a, int start) { - if (start >= a.length()) return ""; - return a.substring(start); - } - +package org.apache.hadoop.hive.ql.udf; + +import org.apache.hadoop.hive.ql.exec.UDF; + + +public class UDFSubstr extends UDF { + + public UDFSubstr() { + } + + public String evaluate(String a, int start, int len) { + if (start >= a.length()) return ""; + if (start + len > a.length()) len = a.length() - start; + return a.substring(start, start + len); + } + + public String evaluate(String a, int start) { + if (start >= a.length()) return ""; + return a.substring(start); + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java (working copy) @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFRound implements UDF { +public class UDFRound extends UDF { public UDFRound() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java (working copy) @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFLower implements UDF { +public class UDFLower extends UDF { public UDFLower() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java (working copy) @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.udf; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.UDF; - - -public class UDFStrLe implements UDF { - - public UDFStrLe() { - } - - public boolean evaluate(String a, String b) { - int code = a.compareTo(b); - return (code <= 0); - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java (working copy) @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFRegExp implements UDF { +public class UDFRegExp extends UDF { private String lastRegex = null; private Pattern p = null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPNotNull implements UDF { +public class UDFOPNotNull extends UDF { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNotNull"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java (working copy) @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFUpper implements UDF { +public class UDFUpper extends UDF { public UDFUpper() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java (working copy) @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.udf; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.UDF; - - -public class UDFStrNe implements UDF { - - public UDFStrNe() { - } - - public boolean evaluate(String a, String b) { - return !a.equals(b); - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java (revision 0) @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf; + +import org.apache.hadoop.hive.ql.exec.NumericOpMethodResolver; +import org.apache.hadoop.hive.ql.exec.UDF; + +/** + * Base class for numeric operators like +, -, / etc. All these operators + * share a common method resolver (NumericOpMethodResolver). + */ +public abstract class UDFBaseNumericOp extends UDF { + + /** + * Constructor. + * This constructor sets the resolver to be used for numeric operators. 
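+ * A hypothetical concrete subclass (not part of this patch, null handling
+ * omitted), showing the overloads an operator like "+" would supply:
+ * <pre>
+ *   public class UDFOPPlusSketch extends UDFBaseNumericOp {
+ *     public Byte evaluate(Byte a, Byte b)          { return (byte)(a + b); }
+ *     public Integer evaluate(Integer a, Integer b) { return a + b; }
+ *     public Long evaluate(Long a, Long b)          { return a + b; }
+ *     public Float evaluate(Float a, Float b)       { return a + b; }
+ *     public Double evaluate(Double a, Double b)    { return a + b; }
+ *   }
+ * </pre>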
+ * See {@link UDFMethodResolver} + */ + public UDFBaseNumericOp() { + setResolver(new NumericOpMethodResolver(this.getClass())); + } + + public abstract Byte evaluate(Byte a, Byte b); + public abstract Integer evaluate(Integer a, Integer b); + public abstract Long evaluate(Long a, Long b); + public abstract Float evaluate(Float a, Float b); + public abstract Double evaluate(Double a, Double b); + +} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPBitNot implements UDF { +public class UDFOPBitNot extends UDF { private static Log LOG = LogFactory.getLog(UDFOPBitNot.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPNot implements UDF { +public class UDFOPNot extends UDF { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNot"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPNegative implements UDF { +public class UDFOPNegative extends UDF { private static Log LOG = LogFactory.getLog(UDFOPNegative.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPDivide implements UDF { +public class UDFOPDivide extends UDFBaseNumericOp { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPDivide"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPBitXor implements UDF { +public class UDFOPBitXor extends UDF { private static Log LOG = LogFactory.getLog(UDFOPBitXor.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (working copy) @@ -21,7 +21,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFConcat implements UDF { +public class UDFConcat extends UDF { public UDFConcat() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPNull implements UDF { +public class UDFOPNull extends UDF { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNull"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java (working copy) @@ -23,7 +23,7 @@ import java.util.Map; import java.util.List; -public class UDFSize implements UDF { +public class UDFSize extends UDF { public UDFSize() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (working copy) @@ -22,7 +22,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFCeil implements UDF { +public class UDFCeil extends UDF { private static Log LOG = LogFactory.getLog(UDFCeil.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPMod implements UDF { +public class UDFOPMod extends UDFBaseNumericOp { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMod"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToBoolean implements UDF { +public class UDFToBoolean extends UDF { private static Log LOG = LogFactory.getLog(UDFToBoolean.class.getName()); @@ -31,6 +31,16 @@ } /** + * Convert a void to boolean. This is called for CAST(... AS BOOLEAN) + * + * @param i The value of a void type + * @return Boolean + */ + public Boolean evaluate(Void i) { + return null; + } + + /** * Convert from a byte to boolean. This is called for CAST(... AS BOOLEAN) * * @param i The byte value to convert @@ -114,4 +124,18 @@ } } + /** + * Convert from a string to boolean. This is called for CAST(... 
AS BOOLEAN) + * + * @param i The string value to convert + * @return Boolean + */ + public Boolean evaluate(String i) { + if (i == null) { + return null; + } else { + return Boolean.valueOf(i.length() != 0); + } + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (working copy) @@ -18,47 +18,284 @@ package org.apache.hadoop.hive.ql.udf; +import java.sql.Date; + import org.apache.hadoop.hive.ql.exec.UDAF; +import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; - public class UDAFMin extends UDAF { - private double mMin; - private boolean mEmpty; - - public UDAFMin() { - super(); - init(); + static public class MinShortEvaluator implements UDAFEvaluator { + private short mMin; + private boolean mEmpty; + + public MinShortEvaluator() { + super(); + init(); + } + + public void init() { + mMin = 0; + mEmpty = true; + } + + public boolean iterate(Short o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else { + mMin = (short) Math.min(mMin, o); + } + } + return true; + } + + public Short terminatePartial() { + return mEmpty ? null : Short.valueOf(mMin); + } + + public boolean merge(Short o) { + return iterate(o); + } + + public Short terminate() { + return mEmpty ? null : Short.valueOf(mMin); + } } - public void init() { - mMin = 0; - mEmpty = true; + static public class MinIntEvaluator implements UDAFEvaluator { + private int mMin; + private boolean mEmpty; + + public MinIntEvaluator() { + super(); + init(); + } + + public void init() { + mMin = 0; + mEmpty = true; + } + + public boolean iterate(Integer o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else { + mMin = Math.min(mMin, o); + } + } + return true; + } + + public Integer terminatePartial() { + return mEmpty ? null : Integer.valueOf(mMin); + } + + public boolean merge(Integer o) { + return iterate(o); + } + + public Integer terminate() { + return mEmpty ? null : Integer.valueOf(mMin); + } } - public boolean aggregate(Double o) { - if (o != null) { - if (mEmpty) { - mMin = o; - mEmpty = false; - } else { - mMin = Math.min(mMin, o); + static public class MinLongEvaluator implements UDAFEvaluator { + private long mMin; + private boolean mEmpty; + + public MinLongEvaluator() { + super(); + init(); + } + + public void init() { + mMin = 0; + mEmpty = true; + } + + public boolean iterate(Long o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else { + mMin = Math.min(mMin, o); + } } + return true; } - return true; + + public Long terminatePartial() { + return mEmpty ? null : Long.valueOf(mMin); + } + + public boolean merge(Long o) { + return iterate(o); + } + + public Long terminate() { + return mEmpty ? null : Long.valueOf(mMin); + } } - - public Double evaluatePartial() { - return mEmpty ? null : Double.valueOf(mMin); + + static public class MinFloatEvaluator implements UDAFEvaluator { + private float mMin; + private boolean mEmpty; + + public MinFloatEvaluator() { + super(); + init(); + } + + public void init() { + mMin = 0; + mEmpty = true; + } + + public boolean iterate(Float o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else { + mMin = Math.min(mMin, o); + } + } + return true; + } + + public Float terminatePartial() { + return mEmpty ? 
null : Float.valueOf(mMin); + } + + public boolean merge(Float o) { + return iterate(o); + } + + public Float terminate() { + return mEmpty ? null : Float.valueOf(mMin); + } } - public boolean aggregatePartial(Double o) { - return aggregate(o); + static public class MinDoubleEvaluator implements UDAFEvaluator { + private double mMin; + private boolean mEmpty; + + public MinDoubleEvaluator() { + super(); + init(); + } + + public void init() { + mMin = 0; + mEmpty = true; + } + + public boolean iterate(Double o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else { + mMin = Math.min(mMin, o); + } + } + return true; + } + + public Double terminatePartial() { + return mEmpty ? null : Double.valueOf(mMin); + } + + public boolean merge(Double o) { + return iterate(o); + } + + public Double terminate() { + return mEmpty ? null : Double.valueOf(mMin); + } } - public Double evaluate() { - return mEmpty ? null : Double.valueOf(mMin); + static public class MinStringEvaluator implements UDAFEvaluator { + private String mMin; + private boolean mEmpty; + + public MinStringEvaluator() { + super(); + init(); + } + + public void init() { + mMin = null; + mEmpty = true; + } + + public boolean iterate(String o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else if (mMin.compareTo(o) > 0) { + mMin = o; + } + } + return true; + } + + public String terminatePartial() { + return mEmpty ? null : mMin; + } + + public boolean merge(String o) { + return iterate(o); + } + + public String terminate() { + return mEmpty ? null : mMin; + } } + static public class MinDateEvaluator implements UDAFEvaluator { + private Date mMin; + private boolean mEmpty; + + public MinDateEvaluator() { + super(); + init(); + } + + public void init() { + mMin = null; + mEmpty = true; + } + + public boolean iterate(Date o) { + if (o != null) { + if (mEmpty) { + mMin = o; + mEmpty = false; + } else if (mMin.compareTo(o) > 0){ + mMin = o; + } + } + return true; + } + + public Date terminatePartial() { + return mEmpty ? null : mMin; + } + + public boolean merge(Date o) { + return iterate(o); + } + + public Date terminate() { + return mEmpty ? 
null : mMin; + } + } } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java (working copy) @@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFFromUnixTime implements UDF { +public class UDFFromUnixTime extends UDF { private static Log LOG = LogFactory.getLog(UDFFromUnixTime.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java (working copy) @@ -24,7 +24,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFRTrim implements UDF { +public class UDFRTrim extends UDF { public UDFRTrim() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToShort implements UDF { +public class UDFToShort extends UDF { private static Log LOG = LogFactory.getLog(UDFToByte.class.getName()); @@ -31,6 +31,16 @@ } /** + * Convert from void to a short. This is called for CAST(... AS SMALLINT) + * + * @param i The void value to convert + * @return Short + */ + public Short evaluate(Void i) { + return null; + } + + /** * Convert from boolean to a short. This is called for CAST(... AS SMALLINT) * * @param i The boolean value to convert @@ -135,4 +145,17 @@ } } + /** + * Convert from date to a short. This is called for CAST(... AS SMALLINT) + * + * @param i The date value to convert + * @return Short + */ + public Short evaluate(java.sql.Date i) { + if (i == null) { + return null; + } else { + return Long.valueOf(i.getTime()).shortValue(); + } + } } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java (working copy) @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.udf; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.UDF; - - -public class UDFStrGt implements UDF { - - public UDFStrGt() { - } - - public boolean evaluate(String a, String b) { - int code = a.compareTo(b); - return (code > 0); - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToFloat implements UDF { +public class UDFToFloat extends UDF { private static Log LOG = LogFactory.getLog(UDFToFloat.class.getName()); @@ -31,6 +31,16 @@ } /** + * Convert from void to a float. This is called for CAST(... AS FLOAT) + * + * @param i The void value to convert + * @return Float + */ + public Float evaluate(Void i) { + return null; + } + + /** * Convert from boolean to a float. This is called for CAST(... AS FLOAT) * * @param i The boolean value to convert @@ -131,4 +141,17 @@ } } + /** + * Convert from date to a float. This is called for CAST(... AS FLOAT) + * + * @param i The date value to convert + * @return Float + */ + public Float evaluate(java.sql.Date i) { + if (i == null) { + return null; + } else { + return Float.valueOf(i.getTime()); + } + } } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (working copy) @@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFDayOfMonth implements UDF { +public class UDFDayOfMonth extends UDF { private static Log LOG = LogFactory.getLog(UDFDayOfMonth.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java (working copy) @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFJson implements UDF { +public class UDFJson extends UDF { private static Log LOG = LogFactory.getLog(UDFJson.class.getName()); private Pattern pattern_key = Pattern.compile("^([a-zA-Z0-9_\\-]+).*"); private Pattern pattern_index = Pattern.compile("\\[([0-9]+|\\*)\\]"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java (working copy) @@ -24,7 +24,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFTrim implements UDF { +public class UDFTrim extends UDF { public UDFTrim() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java (working copy) @@ -22,7 +22,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -public 
class UDFDefaultSampleHashFn implements UDF { +public class UDFDefaultSampleHashFn extends UDF { protected final Log LOG; public UDFDefaultSampleHashFn() { Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFFloor implements UDF { +public class UDFFloor extends UDF { private static Log LOG = LogFactory.getLog(UDFFloor.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (working copy) @@ -18,46 +18,18 @@ package org.apache.hadoop.hive.ql.udf; -import java.sql.Date; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.exec.ComparisonOpMethodResolver; import org.apache.hadoop.hive.ql.exec.UDF; +public abstract class UDFBaseCompare extends UDF { -public abstract class UDFBaseCompare implements UDF { - - private static Log LOG = LogFactory.getLog(UDFBaseCompare.class.getName()); - + /** + * This constructor sets the resolver to be used for comparison operators. + * See {@link UDFMethodResolver} + */ public UDFBaseCompare() { + setResolver(new ComparisonOpMethodResolver(this.getClass())); } - public abstract Boolean evaluate(Double a, Double b); - - /** If one of the argument is a String and the other is a Number, convert - * String to double and the Number to double, and then compare. - */ - public Boolean evaluate(String a, Number b) { - Double aDouble = null; - try { - aDouble = Double.valueOf(a); - } catch (Exception e){ - // do nothing: aDouble will be null. - } - return evaluate(aDouble, new Double(b.doubleValue())); - } - - /** If one of the argument is a String and the other is a Number, convert - * String to double and the Number to double, and then compare. - */ - public Boolean evaluate(Number a, String b) { - Double bDouble = null; - try { - bDouble = Double.valueOf(b); - } catch (Exception e){ - // do nothing: bDouble will be null. - } - return evaluate(new Double(a.doubleValue()), bDouble); - } - + public abstract Boolean evaluate(Double a, Double b); } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java (working copy) @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.udf; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.UDF; - - -public class UDFStrGe implements UDF { - - public UDFStrGe() { - } - - public boolean evaluate(String a, String b) { - int code = a.compareTo(b); - return (code >= 0); - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (working copy) @@ -24,7 +24,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFLike implements UDF { +public class UDFLike extends UDF { private static Log LOG = LogFactory.getLog(UDFLike.class.getName()); private String lastLikePattern = null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (working copy) @@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFMonth implements UDF { +public class UDFMonth extends UDF { private static Log LOG = LogFactory.getLog(UDFMonth.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java (working copy) @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFRegExpReplace implements UDF { +public class UDFRegExpReplace extends UDF { private String lastRegex = null; private Pattern p = null; Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java (working copy) @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.udf; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.UDF; - - -public class UDFStrEq implements UDF { - - private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFStrEq"); - - public UDFStrEq() { - } - - public boolean evaluate(String a, String b) { - return a.equals(b); - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPBitAnd implements UDF { +public class UDFOPBitAnd extends UDF { private static Log LOG = LogFactory.getLog(UDFOPBitAnd.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPAnd implements UDF { +public class UDFOPAnd extends UDF { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPAnd"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java (working copy) @@ -18,11 +18,12 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.hadoop.hive.ql.exec.UDAF; +import org.apache.hadoop.hive.ql.exec.NumericUDAF; +import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; -public class UDAFSum extends UDAF { +public class UDAFSum extends NumericUDAF implements UDAFEvaluator { private double mSum; private boolean mEmpty; @@ -37,7 +38,7 @@ mEmpty = true; } - public boolean aggregate(Double o) { + public boolean iterate(Double o) { if (o != null) { mSum += o; mEmpty = false; @@ -45,12 +46,12 @@ return true; } - public Double evaluatePartial() { + public Double terminatePartial() { // This is SQL standard - sum of zero items should be null. return mEmpty ? null : new Double(mSum); } - public boolean aggregatePartial(Double o) { + public boolean merge(Double o) { if (o != null) { mSum += o; mEmpty = false; @@ -58,7 +59,7 @@ return true; } - public Double evaluate() { + public Double terminate() { // This is SQL standard - sum of zero items should be null. return mEmpty ? null : new Double(mSum); } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (working copy) @@ -23,13 +23,17 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToString implements UDF { +public class UDFToString extends UDF { private static Log LOG = LogFactory.getLog(UDFToString.class.getName()); public UDFToString() { } + public String evaluate(Void i) { + return null; + } + public String evaluate(Boolean i) { if (i == null) { return null; @@ -86,4 +90,17 @@ } } + /** + * Convert from date to a string. This is called for CAST(... 
AS STRING) + * + * @param i The date value to convert + * @return String + */ + public String evaluate(java.sql.Date i) { + if (i == null) { + return null; + } else { + return i.toString(); + } + } } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPOr implements UDF { +public class UDFOPOr extends UDF { private static Log LOG = LogFactory.getLog(UDFOPOr.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFToDate implements UDF { +public class UDFToDate extends UDF { private static Log LOG = LogFactory.getLog(UDFToDate.class.getName()); @@ -44,4 +44,62 @@ } } + public java.sql.Date evaluate(Void i) { + return null; + } + + public java.sql.Date evaluate(Byte i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + + public java.sql.Date evaluate(Short i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + + public java.sql.Date evaluate(Integer i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + + public java.sql.Date evaluate(Long i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + + public java.sql.Date evaluate(Float i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + + public java.sql.Date evaluate(Double i) { + if (i == null) { + return null; + } + else { + return new java.sql.Date(i.longValue()); + } + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (working copy) @@ -32,7 +32,7 @@ * The case of int + double will be handled by implicit type casting using * UDFRegistry.implicitConvertable method. 
*/ -public class UDFOPPlus implements UDF { +public class UDFOPPlus extends UDFBaseNumericOp { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPPlus"); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; import java.util.Random; -public class UDFRand implements UDF { +public class UDFRand extends UDF { private static Log LOG = LogFactory.getLog(UDFRand.class.getName()); Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (working copy) @@ -18,47 +18,286 @@ package org.apache.hadoop.hive.ql.udf; +import java.sql.Date; + import org.apache.hadoop.hive.ql.exec.UDAF; +import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; public class UDAFMax extends UDAF { - private double mMax; - private boolean mEmpty; - - public UDAFMax() { - super(); - init(); + static public class MaxShortEvaluator implements UDAFEvaluator { + private short mMax; + private boolean mEmpty; + + public MaxShortEvaluator() { + super(); + init(); + } + + public void init() { + mMax = 0; + mEmpty = true; + } + + public boolean iterate(Short o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else { + mMax = (short) Math.max(mMax, o); + } + } + return true; + } + + public Short terminatePartial() { + return mEmpty ? null : Short.valueOf(mMax); + } + + public boolean merge(Short o) { + return iterate(o); + } + + public Short terminate() { + return mEmpty ? null : Short.valueOf(mMax); + } } - public void init() { - mMax = 0; - mEmpty = true; + static public class MaxIntEvaluator implements UDAFEvaluator { + private int mMax; + private boolean mEmpty; + + public MaxIntEvaluator() { + super(); + init(); + } + + public void init() { + mMax = 0; + mEmpty = true; + } + + public boolean iterate(Integer o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else { + mMax = Math.max(mMax, o); + } + } + return true; + } + + public Integer terminatePartial() { + return mEmpty ? null : Integer.valueOf(mMax); + } + + public boolean merge(Integer o) { + return iterate(o); + } + + public Integer terminate() { + return mEmpty ? null : Integer.valueOf(mMax); + } } - public boolean aggregate(Double o) { - if (o != null) { - if (mEmpty) { - mMax = o; - mEmpty = false; - } else { - mMax = Math.max(mMax, o); + static public class MaxLongEvaluator implements UDAFEvaluator { + private long mMax; + private boolean mEmpty; + + public MaxLongEvaluator() { + super(); + init(); + } + + public void init() { + mMax = 0; + mEmpty = true; + } + + public boolean iterate(Long o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else { + mMax = Math.max(mMax, o); + } } + return true; } - return true; + + public Long terminatePartial() { + return mEmpty ? null : Long.valueOf(mMax); + } + + public boolean merge(Long o) { + return iterate(o); + } + + public Long terminate() { + return mEmpty ? null : Long.valueOf(mMax); + } } - - public Double evaluatePartial() { - return mEmpty ? 
null : Double.valueOf(mMax); + + static public class MaxFloatEvaluator implements UDAFEvaluator { + private float mMax; + private boolean mEmpty; + + public MaxFloatEvaluator() { + super(); + init(); + } + + public void init() { + mMax = 0; + mEmpty = true; + } + + public boolean iterate(Float o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else { + mMax = Math.max(mMax, o); + } + } + return true; + } + + public Float terminatePartial() { + return mEmpty ? null : Float.valueOf(mMax); + } + + public boolean merge(Float o) { + return iterate(o); + } + + public Float terminate() { + return mEmpty ? null : Float.valueOf(mMax); + } } - public boolean aggregatePartial(Double o) { - return aggregate(o); + static public class MaxDoubleEvaluator implements UDAFEvaluator { + private double mMax; + private boolean mEmpty; + + public MaxDoubleEvaluator() { + super(); + init(); + } + + public void init() { + mMax = 0; + mEmpty = true; + } + + public boolean iterate(Double o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else { + mMax = Math.max(mMax, o); + } + } + return true; + } + + public Double terminatePartial() { + return mEmpty ? null : Double.valueOf(mMax); + } + + public boolean merge(Double o) { + return iterate(o); + } + + public Double terminate() { + return mEmpty ? null : Double.valueOf(mMax); + } } - public Double evaluate() { - return mEmpty ? null : Double.valueOf(mMax); + static public class MaxStringEvaluator implements UDAFEvaluator { + private String mMax; + private boolean mEmpty; + + public MaxStringEvaluator() { + super(); + init(); + } + + public void init() { + mMax = null; + mEmpty = true; + } + + public boolean iterate(String o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else if (mMax.compareTo(o) < 0) { + mMax = o; + } + } + return true; + } + + public String terminatePartial() { + return mEmpty ? null : mMax; + } + + public boolean merge(String o) { + return iterate(o); + } + + public String terminate() { + return mEmpty ? null : mMax; + } } + static public class MaxDateEvaluator implements UDAFEvaluator { + private Date mMax; + private boolean mEmpty; + + public MaxDateEvaluator() { + super(); + init(); + } + + public void init() { + mMax = null; + mEmpty = true; + } + + public boolean iterate(Date o) { + if (o != null) { + if (mEmpty) { + mMax = o; + mEmpty = false; + } else if (mMax.compareTo(o) < 0){ + mMax = o; + } + } + return true; + } + + public Date terminatePartial() { + return mEmpty ? null : mMax; + } + + public boolean merge(Date o) { + return iterate(o); + } + + public Date terminate() { + return mEmpty ? 
null : mMax; + } + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (working copy) @@ -24,7 +24,7 @@ import java.util.regex.Pattern; import java.util.regex.Matcher; -public class UDFLTrim implements UDF { +public class UDFLTrim extends UDF { public UDFLTrim() { } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java (working copy) @@ -19,9 +19,10 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.UDAF; +import org.apache.hadoop.hive.ql.exec.UDAFEvaluator; -public class UDAFCount extends UDAF { +public class UDAFCount extends UDAF implements UDAFEvaluator { private long mCount; @@ -34,7 +35,7 @@ mCount = 0; } - public boolean aggregate(Object o) { + public boolean iterate(Object o) { // Our SerDe between map/reduce boundary may convert MetadataTypedSerDe to if (o != null && !o.equals("")) { mCount ++; @@ -42,18 +43,17 @@ return true; } - public Long evaluatePartial() { + public Long terminatePartial() { return Long.valueOf(mCount); } - public boolean aggregatePartial(Long count) { + public boolean merge(Long count) { mCount += count; return true; } - public Long evaluate() { + public Long terminate() { return Long.valueOf(mCount); } - } Index: ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (revision 736746) +++ ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (working copy) @@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDF; -public class UDFOPMinus implements UDF { +public class UDFOPMinus extends UDFBaseNumericOp { private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMinus");
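
Reviewer note (not part of the patch): throughout this section every UDF switches from implementing the UDF interface to extending the UDF base class, and UDFBaseCompare now installs a ComparisonOpMethodResolver in its constructor via setResolver(). Under this scheme the framework picks the evaluate() overload whose parameter types match the call site, which is why the per-type string comparators (UDFStrEq, UDFStrGt, UDFStrGe) can be deleted outright. Below is a minimal sketch of a UDF written against the new convention, assuming only the base class visible in this diff; the class name UDFExampleReverse is hypothetical.

package org.apache.hadoop.hive.ql.udf;

import org.apache.hadoop.hive.ql.exec.UDF;

public class UDFExampleReverse extends UDF {

  public UDFExampleReverse() {
  }

  // A NULL literal reaches the resolver as Void; the evaluate(Void)
  // overloads this patch adds to the cast UDFs follow the same pattern
  // and simply return null.
  public String evaluate(Void s) {
    return null;
  }

  public String evaluate(String s) {
    if (s == null) {
      return null;
    }
    return new StringBuilder(s).reverse().toString();
  }
}

The evaluate(Void) overloads added here to UDFToShort, UDFToFloat, UDFToString, and UDFToDate serve exactly this purpose: a NULL argument binds to the Void overload and yields NULL instead of a method-resolution failure.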
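
Reviewer note (not part of the patch): UDAFSum, UDAFCount, and UDAFMax all move to the same renamed lifecycle, iterate() for aggregate(), terminatePartial() for evaluatePartial(), merge() for aggregatePartial(), and terminate() for evaluate(), with the state object now implementing UDAFEvaluator; UDAFMax additionally splits into one evaluator inner class per supported type. Below is a minimal sketch of an aggregate built on that contract, assuming only the classes shown in this diff; UDAFExampleAvg and its String-encoded partial are hypothetical simplifications.

package org.apache.hadoop.hive.ql.udf;

import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;

public class UDAFExampleAvg extends UDAF {

  public static class AvgEvaluator implements UDAFEvaluator {
    private double mSum;
    private long mCount;

    public AvgEvaluator() {
      init();
    }

    public void init() {
      mSum = 0;
      mCount = 0;
    }

    // Called once per input row on the map side.
    public boolean iterate(Double o) {
      if (o != null) {
        mSum += o;
        mCount++;
      }
      return true;
    }

    // Emits the partial aggregate shipped across the map/reduce
    // boundary, encoded here as "sum,count" to keep the sketch short.
    public String terminatePartial() {
      return mCount == 0 ? null : mSum + "," + mCount;
    }

    // Folds one partial aggregate into the running state.
    public boolean merge(String o) {
      if (o != null) {
        int comma = o.indexOf(',');
        mSum += Double.parseDouble(o.substring(0, comma));
        mCount += Long.parseLong(o.substring(comma + 1));
      }
      return true;
    }

    public Double terminate() {
      // Matching the SQL convention noted in UDAFSum: the aggregate of
      // zero rows is null.
      return mCount == 0 ? null : Double.valueOf(mSum / mCount);
    }
  }
}

The String partial is only for brevity; the contract visible in this diff just requires that terminatePartial()'s return type match merge()'s parameter type, as it does in each evaluator of UDAFMax above.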