Flink / FLINK-15511

Flink 1.10 throws org.apache.flink.table.api.TableException when connecting to Hive


Details

    • Type: Bug
    • Status: Closed
    • Priority: Major
    • Resolution: Invalid
    • Affects Version/s: 1.10.0
    • Fix Version/s: 1.10.0
    • Component/s: Connectors / Hive
    • Environment: flink master, hive 1.2.1

    Description

Start the Scala shell on YARN:

      bin/start-scala-shell.sh yarn -qu bi -jm 1024m -tm 2048m

Then run the following in the shell:

      import org.apache.flink.table.catalog.hive.HiveCatalog
      import org.apache.flink.types.Row

      val name = "myhive"
      val defaultDatabase = "test"
      val hiveConfDir = "/etc/hive/conf"
      val version = "1.2.1" // or 2.3.4
      val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version)
      stenv.registerCatalog("myhive", hive)
      stenv.useCatalog("myhive")
      stenv.listTables
      stenv.sqlQuery("select * from gsp_test3").toAppendStream[Row].print

gsp_test3 table columns:

      id   int
      name string

gsp_test3 table storage:

      plain text file
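For reference, the ROW<`id` INT, `name` STRING> data type that the exception below says the source declares corresponds one-to-one to this two-column schema. A minimal sketch of spelling that type out with Flink's DataTypes API (the value name here is illustrative, not from the report):

      import org.apache.flink.table.api.DataTypes

      // ROW<`id` INT, `name` STRING>, as reported by
      // TableSource.getProducedDataType() in the exception further down.
      val expectedType = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.INT()),
        DataTypes.FIELD("name", DataTypes.STRING()))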

       

Full shell session output:

      scala> import org.apache.flink.table.catalog.hive.HiveCatalog
      import org.apache.flink.table.catalog.hive.HiveCatalog

      scala> val name = "myhive"
      name: String = myhive

      scala> val defaultDatabase = "test"
      defaultDatabase: String = test

      scala> val hiveConfDir = "/etc/hive/conf"
      hiveConfDir: String = /etc/hive/conf

      scala> val version = "1.2.1" // or 1.2.1 2.3.4
      version: String = 1.2.1

      scala> val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version)
      20/01/08 14:36:10 INFO hive.HiveCatalog: Setting hive conf dir as /etc/hive/conf
      20/01/08 14:36:10 WARN conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
      20/01/08 14:36:10 WARN conf.HiveConf: HiveConf of name hive.mapred.supports.subdirectories does not exist
      20/01/08 14:36:10 INFO hive.HiveCatalog: Created HiveCatalog 'myhive'
      hive: org.apache.flink.table.catalog.hive.HiveCatalog = org.apache.flink.table.catalog.hive.HiveCatalog@60729135

      scala> stenv.registerCatalog("myhive", hive)
      20/01/08 14:36:10 INFO hive.metastore: Trying to connect to metastore with URI thrift://bgnode4:9083
      20/01/08 14:36:10 INFO hive.metastore: Connected to metastore.
      20/01/08 14:36:10 INFO hive.HiveCatalog: Connected to Hive metastore

      scala> stenv.useCatalog("myhive")
      20/01/08 14:36:10 INFO catalog.CatalogManager: Set the current default catalog as [myhive] and the current default database as [test].

      scala> stenv.listTables
      res6: Array[String] = Array(amazonproductscore_test, amazonproductscore_test_tmp, amazonshopmanagerkpi, bucketed_user, bulkload_spark_gross_profit_items_zcm, dim_date_test, dw_gross_profit_items_phoenix_test, dw_gross_profit_items_phoenix_test2, dw_gross_profit_items_phoenix_test3, dw_gross_profit_items_phoenix_test4, dw_gross_profit_items_phoenix_test5, gsp_test12, gsp_test2, gsp_test3, hive_phoenix, ni, orderparent_test, orderparent_test2, phoenix_orderparent_id_put_tb, phoenix_orderparent_id_put_tb2, phoenix_orderparent_id_tb, productdailysales, result20190404, result20190404_2, result20190404_3, result20190404_4_5_9, result20190404_5, result20190404vat, result20190404vat11, result20190404vat12, result20190404vat13, result20190404vat5, result20190404vat6_2, ...

      scala> stenv.sqlQuery("select * from gsp_test3").toAppendStream[Row].print
      20/01/08 14:36:13 INFO typeutils.TypeExtractor: class org.apache.flink.types.Row does not contain a getter for field fields
      20/01/08 14:36:13 INFO typeutils.TypeExtractor: class org.apache.flink.types.Row does not contain a setter for field fields
      20/01/08 14:36:13 INFO typeutils.TypeExtractor: Class class org.apache.flink.types.Row cannot be used as a POJO type because not all fields are valid POJO fields, and must be processed as GenericType. Please read the Flink documentation on "Data Types & Serialization" for details of the effect on performance.
      20/01/08 14:36:13 WARN conf.HiveConf: HiveConf of name hive.server2.enable.impersonation does not exist
      20/01/08 14:36:13 WARN conf.HiveConf: HiveConf of name hive.mapred.supports.subdirectories does not exist
      20/01/08 14:36:13 INFO hive.metastore: Trying to connect to metastore with URI thrift://bgnode3:9083
      20/01/08 14:36:13 INFO hive.metastore: Connected to metastore.
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: jobmanager.rpc.address, localhost
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: jobmanager.rpc.port, 6123
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: jobmanager.heap.size, 1024m
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: taskmanager.memory.process.size, 1024m
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: taskmanager.numberOfTaskSlots, 1
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: parallelism.default, 1
      20/01/08 14:36:13 INFO configuration.GlobalConfiguration: Loading configuration property: jobmanager.execution.failover-strategy, region
      20/01/08 14:36:13 INFO mapred.FileInputFormat: Total input paths to process : 1
      20/01/08 14:36:13 INFO hive.HiveTableSource: Hive source(test.gsp_test3}) createInputSplits use time: 72 ms
      org.apache.flink.table.api.TableException: TableSource of type org.apache.flink.connectors.hive.HiveTableSource returned a DataStream of data type LEGACY('STRUCTURED_TYPE', 'ANY<org.apache.flink.table.dataformat.BaseRow, rO0ABXNyADhvcmcuYXBhY2hlLmZsaW5rLnRhYmxlLnJ1bnRpbWUudHlwZXV0aWxzLkJhc2VSb3dUeXBlSW5mbw...[serialized BaseRowTypeInfo payload truncated]...>') that does not match with the data type ROW<`id` INT, `name` STRING> declared by the TableSource.getProducedDataType() method. Please validate the implementation of the TableSource.
      at org.apache.flink.table.plan.nodes.datastream.StreamTableSourceScan.translateToPlan(StreamTableSourceScan.scala:116)
      at org.apache.flink.table.planner.StreamPlanner.translateToCRow(StreamPlanner.scala:251)
      at org.apache.flink.table.planner.StreamPlanner.translateOptimized(StreamPlanner.scala:412)
      at org.apache.flink.table.planner.StreamPlanner.translateToType(StreamPlanner.scala:402)
      at org.apache.flink.table.planner.StreamPlanner.org$apache$flink$table$planner$StreamPlanner$$translate(StreamPlanner.scala:180)
      at org.apache.flink.table.planner.StreamPlanner$$anonfun$translate$1.apply(StreamPlanner.scala:117)
      at org.apache.flink.table.planner.StreamPlanner$$anonfun$translate$1.apply(StreamPlanner.scala:117)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.Iterator$class.foreach(Iterator.scala:891)
      at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
      at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
      at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
      at scala.collection.AbstractTraversable.map(Traversable.scala:104)
      at org.apache.flink.table.planner.StreamPlanner.translate(StreamPlanner.scala:117)
      at org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl.toDataStream(StreamTableEnvironmentImpl.scala:210)
      at org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl.toAppendStream(StreamTableEnvironmentImpl.scala:107)
      at org.apache.flink.table.api.scala.TableConversions.toAppendStream(TableConversions.scala:101)
      ... 30 elided

      scala> :quit
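Note that the stack trace runs through org.apache.flink.table.plan.nodes.datastream.StreamTableSourceScan and org.apache.flink.table.planner.StreamPlanner, i.e. the legacy planner that backs stenv in the 1.10 Scala shell, while the Hive connector in Flink 1.10 is only supported with the Blink planner; that mismatch would explain the Invalid resolution. A minimal sketch of the same read as a standalone program on the Blink planner (catalog parameters copied from the report; untested, and streaming mode is chosen only to mirror the shell session, whereas the 1.10 docs mostly show Hive reads in batch mode):

      import org.apache.flink.streaming.api.scala._
      import org.apache.flink.table.api.EnvironmentSettings
      import org.apache.flink.table.api.scala._
      import org.apache.flink.table.catalog.hive.HiveCatalog
      import org.apache.flink.types.Row

      object ReadHiveTable {
        def main(args: Array[String]): Unit = {
          val env = StreamExecutionEnvironment.getExecutionEnvironment
          // Hive connectivity in 1.10 requires the Blink planner; the legacy
          // planner fails with the TableException shown above.
          val settings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build()
          val tEnv = StreamTableEnvironment.create(env, settings)

          val hive = new HiveCatalog("myhive", "test", "/etc/hive/conf", "1.2.1")
          tEnv.registerCatalog("myhive", hive)
          tEnv.useCatalog("myhive")

          tEnv.sqlQuery("select * from gsp_test3").toAppendStream[Row].print()
          env.execute("read gsp_test3")
        }
      }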

People

    Assignee: Unassigned
    Reporter: chenchencc