Details
-
Bug
-
Status: Closed
-
Major
-
Resolution: Fixed
-
None
Description
# Spark sql-client execute
CREATE TABLE tpcds_hudi_100.store_sales_id_wujuan
USING hudi
tblproperties (
  type = 'mor',
  primaryKey = 'id',
  preCombineField = 'ts',
  hoodie.parquet.compression.codec = 'snappy',
  hoodie.upsert.shuffle.parallelism = 8,
  hoodie.index.type = 'BLOOM',
  hoodie.bloom.index.use.metadata = true,
  hoodie.metadata.enable = true,
  hoodie.metadata.index.bloom.filter.enable = true,
  hoodie.metadata.index.bloom.filter.column.list = 'id',
  hoodie.metadata.index.column.stats.enable = true,
  hoodie.metadata.index.column.stats.column.list = 'id',
  hoodie.enable.data.skipping = true,
  hoodie.table.metadata.partitions = 'bloom_filters,column_stats,files'
)
as select
  monotonically_increasing_id() as id,
  ss_sold_time_sk, ss_item_sk, ss_customer_sk, ss_cdemo_sk, ss_hdemo_sk,
  ss_addr_sk, ss_store_sk, ss_promo_sk, ss_ticket_number, ss_quantity,
  ss_wholesale_cost, ss_list_price, ss_sales_price, ss_ext_discount_amt,
  ss_ext_sales_price, ss_ext_wholesale_cost, ss_ext_list_price, ss_ext_tax,
  ss_coupon_amt, ss_net_paid, ss_net_paid_inc_tax,
  1 as ts,
  ss_sold_date_sk
from tpcds_hudi_100.store_sales;

-- Flink sql-client execute
-- hive catalog
create catalog wujuan_hudi_catalog with (
  'type' = 'hudi',
  'mode' = 'hms',
  'hive.conf.dir' = '/opt/dtstack/Hive/hive_pkg/conf'
);

use catalog wujuan_hudi_catalog;

select * from tpcds_hudi_100.store_sales_id_wujuan where id = 163208757251;
Attachments
Attachments
Issue Links
- links to