From 257ea0d1a4870922a999596279040a150fd7e39e Mon Sep 17 00:00:00 2001 From: Nishant Date: Wed, 4 Jul 2018 18:41:32 +0530 Subject: [PATCH] [HIVE-20085] Make all Druid Tables as EXTERNAL tables Changes include: 1. Added a new config to enable CTAS for external tables - hive.ctas.external.tables, defaulting to false 2. The Druid schema is now always stored; this allows ALTER TABLE statements on Druid tables 3. If the user has specified a Druid datasource in the CREATE TABLE statement, the schema will be discovered from Druid; otherwise the user needs to provide the schema 4. INSERT/INSERT OVERWRITE will be supported for Druid external tables when the Hive config hive.insert.into.external.tables is set to true (the default) 5. DROP will delete the data when external.table.purge is true on the table; the default for new Druid tables is set via the property hive.external.table.purge.default (false by default) --- .../java/org/apache/hadoop/hive/conf/HiveConf.java | 2 + .../hadoop/hive/druid/DruidStorageHandler.java | 70 +++++++--------------- .../apache/hadoop/hive/druid/serde/DruidSerDe.java | 3 +- .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 2 +- .../queries/clientpositive/druid_timestamptz.q | 2 +- .../queries/clientpositive/druid_timestamptz2.q | 10 ++-- .../queries/clientpositive/druidkafkamini_basic.q | 2 +- .../clientpositive/druidmini_dynamic_partition.q | 8 +-- .../queries/clientpositive/druidmini_expressions.q | 2 +- .../queries/clientpositive/druidmini_extractTime.q | 4 +- .../queries/clientpositive/druidmini_floorTime.q | 2 +- .../test/queries/clientpositive/druidmini_joins.q | 2 +- .../queries/clientpositive/druidmini_masking.q | 2 +- ql/src/test/queries/clientpositive/druidmini_mv.q | 1 + .../test/queries/clientpositive/druidmini_test1.q | 4 +- .../queries/clientpositive/druidmini_test_alter.q | 2 +- .../queries/clientpositive/druidmini_test_insert.q | 6 +- .../clientpositive/druid/druid_timestamptz.q.out | 4 +- .../clientpositive/druid/druid_timestamptz2.q.out | 20 +++---
.../druid/druidkafkamini_basic.q.out | 6 +- .../druid/druidmini_dynamic_partition.q.out | 17 +++--- .../druid/druidmini_expressions.q.out | 6 +- .../druid/druidmini_extractTime.q.out | 8 +-- .../clientpositive/druid/druidmini_floorTime.q.out | 4 +- .../clientpositive/druid/druidmini_joins.q.out | 4 +- .../clientpositive/druid/druidmini_masking.q.out | 4 +- .../clientpositive/druid/druidmini_test1.q.out | 4 +- .../druid/druidmini_test_alter.q.out | 4 +- .../druid/druidmini_test_insert.q.out | 12 ++-- .../hive/metastore/utils/MetaStoreUtils.java | 19 ++++++ 30 files changed, 118 insertions(+), 118 deletions(-) diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 7ef22d6ff2..d896560def 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -2970,6 +2970,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal HIVE_INSERT_INTO_MULTILEVEL_DIRS("hive.insert.into.multilevel.dirs", false, "Where to insert into multilevel directories like\n" + "\"insert directory '/HIVEFT25686/chinna/' from table\""), + HIVE_CTAS_EXTERNAL_TABLES("hive.ctas.external.tables", false, + "whether CTAS for external tables is allowed"), HIVE_INSERT_INTO_EXTERNAL_TABLES("hive.insert.into.external.tables", true, "whether insert into external tables is allowed"), HIVE_TEMPORARY_TABLE_STORAGE( diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java index 57e4800bad..ce132233b7 100644 --- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java +++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java @@ -215,12 +215,10 @@ public void configureInputJobCredentials(TableDesc tableDesc, Map dataSegmentList = DruidStorageHandlerUtils - 
.getCreatedSegments(segmentDescriptorDir, getConf()); - for (DataSegment dataSegment : dataSegmentList) { - try { - deleteSegment(dataSegment); - } catch (SegmentLoadingException e) { - LOG.error(String.format("Error while trying to clean the segment [%s]", dataSegment), e); - } - } - } catch (IOException e) { - LOG.error("Exception while rollback", e); - throw Throwables.propagate(e); - } finally { - cleanWorkingDir(); - } + cleanWorkingDir(); } @Override public void commitCreateTable(Table table) throws MetaException { - if (MetaStoreUtils.isExternalTable(table)) { - // For external tables, we do not need to do anything else - return; - } if(isKafkaStreamingTable(table)){ updateKafkaIngestion(table); } - this.commitInsertTable(table, true); + // For CTAS queries when user has explicitly specified the datasource. + // We will append the data to existing druid datasource. + this.commitInsertTable(table, false); } private void updateKafkaIngestion(Table table){ @@ -762,9 +735,6 @@ public void rollbackDropTable(Table table) { @Override public void commitDropTable(Table table, boolean deleteData) { - if (MetaStoreUtils.isExternalTable(table)) { - return; - } if(isKafkaStreamingTable(table)) { // Stop Kafka Ingestion first final String overlordAddress = Preconditions.checkNotNull(HiveConf @@ -775,6 +745,11 @@ public void commitDropTable(Table table, boolean deleteData) { "Druid Datasource name is null"); stopKafkaIngestion(overlordAddress, dataSourceName); } + + if (!MetaStoreUtils.isExternalTablePurge(table)) { + // Do not delete segments from Druid when external table purge is not set + return; + } String dataSourceName = Preconditions .checkNotNull(table.getParameters().get(Constants.DRUID_DATA_SOURCE), "DataSource name is null !" 
@@ -806,9 +781,6 @@ public void commitDropTable(Table table, boolean deleteData) { public void commitInsertTable(Table table, boolean overwrite) throws MetaException { LOG.debug("commit insert into table {} overwrite {}", table.getTableName(), overwrite); - if (MetaStoreUtils.isExternalTable(table)) { - throw new MetaException("Cannot insert data into external table backed by Druid"); - } try { // Check if there segments to load final Path segmentDescriptorDir = getSegmentDescriptorDir(); diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java index f0e12a22fe..df9049ea02 100644 --- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java +++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java @@ -483,7 +483,6 @@ protected SegmentAnalysis submitMetadataRequest(String address, SegmentMetadataQ } @Override public boolean shouldStoreFieldsInMetastore(Map tableParams) { - // If Druid table is not an external table store the schema in metadata store. 
- return !MetaStoreUtils.isExternal(tableParams); + return true; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 1271799907..59f668b5d4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -13049,7 +13049,7 @@ ASTNode analyzeCreateTable( throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg()); } } - if (isExt) { + if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_INSERT_INTO_EXTERNAL_TABLES) && isExt) { throw new SemanticException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg()); } command_type = CTAS; diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz.q b/ql/src/test/queries/clientpositive/druid_timestamptz.q index 483004402f..116557489c 100644 --- a/ql/src/test/queries/clientpositive/druid_timestamptz.q +++ b/ql/src/test/queries/clientpositive/druid_timestamptz.q @@ -3,7 +3,7 @@ set hive.fetch.task.conversion=more; drop table tstz1_n0; -create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) +create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR"); diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz2.q b/ql/src/test/queries/clientpositive/druid_timestamptz2.q index 8f573c8c9b..07a3cdc34c 100644 --- a/ql/src/test/queries/clientpositive/druid_timestamptz2.q +++ b/ql/src/test/queries/clientpositive/druid_timestamptz2.q @@ -3,7 +3,7 @@ use druid_test_dst; -create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double); +create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double); insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4); insert into 
test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1); insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4); @@ -11,7 +11,7 @@ insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1); insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2); insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2); -CREATE TABLE druid_test_table_1 +CREATE EXTERNAL TABLE druid_test_table_1 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -20,7 +20,7 @@ FROM druid_test_dst.test_base_table; select * FROM druid_test_table_1; -CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY"); @@ -36,7 +36,7 @@ select * FROM druid_test_table_2; SET TIME ZONE UTC; -CREATE TABLE druid_test_table_utc +CREATE EXTERNAL TABLE druid_test_table_utc STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -45,7 +45,7 @@ FROM druid_test_dst.test_base_table; select * FROM druid_test_table_utc; -CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY"); diff --git a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q index 2eb9dbc5a3..b9f817eb20 100644 --- a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q +++ 
b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q @@ -1,5 +1,5 @@ SET hive.vectorized.execution.enabled=false; -CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) +CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "MONTH", diff --git a/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q b/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q index 71d1c5622d..f196c9f004 100644 --- a/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q +++ b/ql/src/test/queries/clientpositive/druidmini_dynamic_partition.q @@ -1,6 +1,6 @@ --! qt:dataset:alltypesorc SET hive.vectorized.execution.enabled=false; -CREATE TABLE druid_partitioned_table_0 +CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -21,7 +21,7 @@ CREATE TABLE druid_partitioned_table_0 cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL; -EXPLAIN CREATE TABLE druid_partitioned_table +EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -44,7 +44,7 @@ EXPLAIN CREATE TABLE druid_partitioned_table -CREATE TABLE druid_partitioned_table +CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -155,7 +155,7 @@ SELECT sum(cint), sum(cbigint) FROM druid_partitioned_table ; set hive.druid.indexer.partition.size.max=10; -CREATE TABLE druid_max_size_partition +CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( 
"druid.segment.granularity" = "HOUR", diff --git a/ql/src/test/queries/clientpositive/druidmini_expressions.q b/ql/src/test/queries/clientpositive/druidmini_expressions.q index fad8f73520..54cbf2a845 100644 --- a/ql/src/test/queries/clientpositive/druidmini_expressions.q +++ b/ql/src/test/queries/clientpositive/druidmini_expressions.q @@ -1,7 +1,7 @@ --! qt:dataset:alltypesorc SET hive.vectorized.execution.enabled=false; -CREATE TABLE druid_table_n0 +CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_extractTime.q b/ql/src/test/queries/clientpositive/druidmini_extractTime.q index 95413612ea..6364b29593 100644 --- a/ql/src/test/queries/clientpositive/druidmini_extractTime.q +++ b/ql/src/test/queries/clientpositive/druidmini_extractTime.q @@ -1,7 +1,7 @@ --! qt:dataset:alltypesorc SET hive.vectorized.execution.enabled=false; -CREATE TABLE druid_table +CREATE EXTERNAL TABLE druid_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -181,7 +181,7 @@ SELECT CAST(`__time` AS DATE) AS `x_date` FROM druid_table ORDER BY `x_date` LIM create table test_extract_from_string_base_table(`timecolumn` timestamp, `date_c` string, `timestamp_c` string, `metric_c` double); insert into test_extract_from_string_base_table values ('2015-03-08 00:00:00', '2015-03-10', '2015-03-08 05:30:20', 5.0); -CREATE TABLE druid_test_extract_from_string_table +CREATE EXTERNAL TABLE druid_test_extract_from_string_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS select diff --git a/ql/src/test/queries/clientpositive/druidmini_floorTime.q b/ql/src/test/queries/clientpositive/druidmini_floorTime.q index a526413dfd..2c2420922a 
100644 --- a/ql/src/test/queries/clientpositive/druidmini_floorTime.q +++ b/ql/src/test/queries/clientpositive/druidmini_floorTime.q @@ -1,7 +1,7 @@ --! qt:dataset:alltypesorc SET hive.vectorized.execution.enabled=false; -CREATE TABLE druid_table_n2 +CREATE EXTERNAL TABLE druid_table_n2 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_joins.q b/ql/src/test/queries/clientpositive/druidmini_joins.q index 720127ed3f..901b946aaa 100644 --- a/ql/src/test/queries/clientpositive/druidmini_joins.q +++ b/ql/src/test/queries/clientpositive/druidmini_joins.q @@ -5,7 +5,7 @@ SET hive.explain.user=false; DROP TABLE druid_table_with_nulls; -CREATE TABLE druid_table_with_nulls +CREATE EXTERNAL TABLE druid_table_with_nulls STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_masking.q b/ql/src/test/queries/clientpositive/druidmini_masking.q index f0f2c0cbf6..4ad54c1b70 100644 --- a/ql/src/test/queries/clientpositive/druidmini_masking.q +++ b/ql/src/test/queries/clientpositive/druidmini_masking.q @@ -1,7 +1,7 @@ set hive.mapred.mode=nonstrict; set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; -CREATE TABLE masking_test_druid +CREATE EXTERNAL TABLE masking_test_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_mv.q b/ql/src/test/queries/clientpositive/druidmini_mv.q index dd8cf6a512..a6e6e91ab8 100644 --- a/ql/src/test/queries/clientpositive/druidmini_mv.q +++ b/ql/src/test/queries/clientpositive/druidmini_mv.q @@ -5,6 +5,7 @@ set hive.support.concurrency=true; set 
hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; set hive.strict.checks.cartesian.product=false; set hive.materializedview.rewriting=true; +SET hive.ctas.external.tables=true; CREATE TABLE cmv_basetable_n2 STORED AS orc diff --git a/ql/src/test/queries/clientpositive/druidmini_test1.q b/ql/src/test/queries/clientpositive/druidmini_test1.q index f93665ed63..80eab0cbf2 100644 --- a/ql/src/test/queries/clientpositive/druidmini_test1.q +++ b/ql/src/test/queries/clientpositive/druidmini_test1.q @@ -1,5 +1,7 @@ --! qt:dataset:alltypesorc -CREATE TABLE druid_table_n3 +SET hive.ctas.external.tables=true; + +CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_test_alter.q b/ql/src/test/queries/clientpositive/druidmini_test_alter.q index e19a00637b..af12376ba9 100644 --- a/ql/src/test/queries/clientpositive/druidmini_test_alter.q +++ b/ql/src/test/queries/clientpositive/druidmini_test_alter.q @@ -1,4 +1,4 @@ -CREATE TABLE druid_alltypesorc_n0 +CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/queries/clientpositive/druidmini_test_insert.q b/ql/src/test/queries/clientpositive/druidmini_test_insert.q index 03657fd7d7..15b8b856ac 100644 --- a/ql/src/test/queries/clientpositive/druidmini_test_insert.q +++ b/ql/src/test/queries/clientpositive/druidmini_test_insert.q @@ -1,5 +1,5 @@ --! 
qt:dataset:alltypesorc -CREATE TABLE druid_alltypesorc +CREATE EXTERNAL TABLE druid_alltypesorc STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -63,7 +63,7 @@ DROP TABLE druid_alltypesorc; insert into test_table_n9 values ('2015-01-08 00:00:00', 'i1-start', 4); insert into test_table_n9 values ('2015-01-08 23:59:59', 'i1-end', 1); - CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float) + CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY"); @@ -90,7 +90,7 @@ insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1); insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2); insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2); -CREATE TABLE druid_test_table_n9 +CREATE EXTERNAL TABLE druid_test_table_n9 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out index ed1a6eae2e..fa9583a8e7 100644 --- a/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out +++ b/ql/src/test/results/clientpositive/druid/druid_timestamptz.q.out @@ -2,13 +2,13 @@ PREHOOK: query: drop table tstz1_n0 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table tstz1_n0 POSTHOOK: type: DROPTABLE -PREHOOK: query: create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) +PREHOOK: query: create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") 
PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@tstz1_n0 -POSTHOOK: query: create table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) +POSTHOOK: query: create external table tstz1_n0(`__time` timestamp with local time zone, n string, v integer) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") POSTHOOK: type: CREATETABLE diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out index c71a435fee..b62095aad6 100644 --- a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out +++ b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out @@ -10,11 +10,11 @@ PREHOOK: Input: database:druid_test_dst POSTHOOK: query: use druid_test_dst POSTHOOK: type: SWITCHDATABASE POSTHOOK: Input: database:druid_test_dst -PREHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double) +PREHOOK: query: create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double) PREHOOK: type: CREATETABLE PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@test_base_table -POSTHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double) +POSTHOOK: query: create external table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:druid_test_dst POSTHOOK: Output: druid_test_dst@test_base_table @@ -84,7 +84,7 @@ POSTHOOK: Output: druid_test_dst@test_base_table POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT [] POSTHOOK: Lineage: test_base_table.num_l SCRIPT [] POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT [] -PREHOOK: query: CREATE TABLE druid_test_table_1 +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_1 STORED BY 
'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -94,7 +94,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: druid_test_dst@test_base_table PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@druid_test_table_1 -POSTHOOK: query: CREATE TABLE druid_test_table_1 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_1 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -121,13 +121,13 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### 2015-03-09 23:59:59.0 US/Pacific i2-end 1.0 2015-03-10 00:00:00.0 US/Pacific i3-start 2.0 2015-03-10 23:59:59.0 US/Pacific i3-end 2.0 -PREHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") PREHOOK: type: CREATETABLE PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@druid_test_table_2 -POSTHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") POSTHOOK: type: CREATETABLE @@ -195,7 +195,7 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### 2015-03-09 23:59:59.0 US/Pacific i2-end 1.0 2015-03-10 00:00:00.0 US/Pacific i3-start 2.0 2015-03-10 23:59:59.0 US/Pacific i3-end 2.0 -PREHOOK: query: CREATE TABLE druid_test_table_utc +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc STORED BY 
'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -205,7 +205,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: druid_test_dst@test_base_table PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@druid_test_table_utc -POSTHOOK: query: CREATE TABLE druid_test_table_utc +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -232,13 +232,13 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### 2015-03-09 23:59:59.0 UTC i2-end 1.0 2015-03-10 00:00:00.0 UTC i3-start 2.0 2015-03-10 23:59:59.0 UTC i3-end 2.0 -PREHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") PREHOOK: type: CREATETABLE PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@druid_test_table_utc2 -POSTHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") POSTHOOK: type: CREATETABLE diff --git a/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out b/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out index 774ed54ad6..f76a36d249 100644 --- a/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out +++ b/ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out @@ -1,4 +1,4 
@@ -PREHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) +PREHOOK: query: CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "MONTH", @@ -15,7 +15,7 @@ PREHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, ` PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@druid_kafka_test -POSTHOOK: query: CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) +POSTHOOK: query: CREATE EXTERNAL TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, language string, added int, deleted int) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "MONTH", @@ -340,6 +340,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat properties: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"__time":"true","added":"true","deleted":"true","language":"true","page":"true","user":"true"}} + EXTERNAL TRUE bucket_count -1 bucketing_version 2 column.name.delimiter , @@ -380,6 +381,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat properties: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"__time":"true","added":"true","deleted":"true","language":"true","page":"true","user":"true"}} + EXTERNAL TRUE bucket_count -1 bucketing_version 2 column.name.delimiter , diff --git a/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out b/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out index 1278a577a4..55387877a0 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out +++ 
b/ql/src/test/results/clientpositive/druid/druidmini_dynamic_partition.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_partitioned_table_0 +PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -22,7 +22,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_partitioned_table_0 -POSTHOOK: query: CREATE TABLE druid_partitioned_table_0 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table_0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -57,7 +57,7 @@ POSTHOOK: Lineage: druid_partitioned_table_0.csmallint SIMPLE [(alltypesorc)allt POSTHOOK: Lineage: druid_partitioned_table_0.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ] POSTHOOK: Lineage: druid_partitioned_table_0.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ] POSTHOOK: Lineage: druid_partitioned_table_0.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ] -PREHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table +PREHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -78,7 +78,7 @@ PREHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table cboolean2 FROM alltypesorc where ctimestamp1 IS NOT NULL PREHOOK: type: CREATETABLE_AS_SELECT -POSTHOOK: query: EXPLAIN CREATE TABLE druid_partitioned_table +POSTHOOK: query: EXPLAIN CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -166,6 +166,7 @@ STAGE PLANS: 
druid.query.granularity MINUTE druid.segment.granularity HOUR druid.segment.targetShardsPerGranularity 6 + isExternal: true Stage: Stage-3 Stats Work @@ -177,7 +178,7 @@ STAGE PLANS: hdfs directory: true destination: hdfs://### HDFS PATH ### -PREHOOK: query: CREATE TABLE druid_partitioned_table +PREHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -201,7 +202,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_partitioned_table -POSTHOOK: query: CREATE TABLE druid_partitioned_table +POSTHOOK: query: CREATE EXTERNAL TABLE druid_partitioned_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -570,7 +571,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@druid_partitioned_table POSTHOOK: Output: hdfs://### HDFS PATH ### 1408069801800 10992545287 -PREHOOK: query: CREATE TABLE druid_max_size_partition +PREHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", @@ -593,7 +594,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_max_size_partition -POSTHOOK: query: CREATE TABLE druid_max_size_partition +POSTHOOK: query: CREATE EXTERNAL TABLE druid_max_size_partition STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ( "druid.segment.granularity" = "HOUR", diff --git a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out index fd77a915d9..e322d8f91f 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out +++ 
b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_table_n0 +PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_table_n0 -POSTHOOK: query: CREATE TABLE druid_table_n0 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -270,6 +270,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat properties: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} + EXTERNAL TRUE bucket_count -1 bucketing_version 2 column.name.delimiter , @@ -301,6 +302,7 @@ STAGE PLANS: output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat properties: COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} + EXTERNAL TRUE bucket_count -1 bucketing_version 2 column.name.delimiter , diff --git a/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out b/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out index 30e273bae8..116c3266f4 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_table +PREHOOK: query: CREATE EXTERNAL TABLE druid_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_table -POSTHOOK: 
query: CREATE TABLE druid_table +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -776,7 +776,7 @@ POSTHOOK: Lineage: test_extract_from_string_base_table.date_c SCRIPT [] POSTHOOK: Lineage: test_extract_from_string_base_table.metric_c SCRIPT [] POSTHOOK: Lineage: test_extract_from_string_base_table.timecolumn SCRIPT [] POSTHOOK: Lineage: test_extract_from_string_base_table.timestamp_c SCRIPT [] -PREHOOK: query: CREATE TABLE druid_test_extract_from_string_table +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_extract_from_string_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS select @@ -786,7 +786,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@test_extract_from_string_base_table PREHOOK: Output: database:default PREHOOK: Output: default@druid_test_extract_from_string_table -POSTHOOK: query: CREATE TABLE druid_test_extract_from_string_table +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_extract_from_string_table STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS select diff --git a/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out index c54fd93a57..1c9e9c6718 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_table_n2 +PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n2 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc 
PREHOOK: Output: database:default PREHOOK: Output: default@druid_table_n2 -POSTHOOK: query: CREATE TABLE druid_table_n2 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n2 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out b/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out index 73a3c9fad3..014c7b5406 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_joins.q.out @@ -2,7 +2,7 @@ PREHOOK: query: DROP TABLE druid_table_with_nulls PREHOOK: type: DROPTABLE POSTHOOK: query: DROP TABLE druid_table_with_nulls POSTHOOK: type: DROPTABLE -PREHOOK: query: CREATE TABLE druid_table_with_nulls +PREHOOK: query: CREATE EXTERNAL TABLE druid_table_with_nulls STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") AS @@ -22,7 +22,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default PREHOOK: Output: default@druid_table_with_nulls -POSTHOOK: query: CREATE TABLE druid_table_with_nulls +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_with_nulls STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") AS diff --git a/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out b/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out index 1aad9677a0..e3b50a4224 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_masking.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE masking_test_druid +PREHOOK: query: CREATE EXTERNAL TABLE masking_test_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES 
("druid.segment.granularity" = "HOUR") AS @@ -17,7 +17,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: _dummy_database@_dummy_table PREHOOK: Output: database:default PREHOOK: Output: default@masking_test_druid -POSTHOOK: query: CREATE TABLE masking_test_druid +POSTHOOK: query: CREATE EXTERNAL TABLE masking_test_druid STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR") AS diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out index 4e078aa41c..7f6c6b0aa3 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_table_n3 +PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_table_n3 -POSTHOOK: query: CREATE TABLE druid_table_n3 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n3 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out index 439dc9c16c..a175f7650a 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_test_alter.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_alltypesorc_n0 +PREHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES 
("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -16,7 +16,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_alltypesorc_n0 -POSTHOOK: query: CREATE TABLE druid_alltypesorc_n0 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc_n0 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out index c675aeb17d..0da76109ac 100644 --- a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out +++ b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: CREATE TABLE druid_alltypesorc +PREHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -18,7 +18,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@druid_alltypesorc -POSTHOOK: query: CREATE TABLE druid_alltypesorc +POSTHOOK: query: CREATE EXTERNAL TABLE druid_alltypesorc STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE") AS @@ -194,13 +194,13 @@ POSTHOOK: Output: druid_test_create_then_insert@test_table_n9 POSTHOOK: Lineage: test_table_n9.num_l SCRIPT [] POSTHOOK: Lineage: test_table_n9.timecolumn SCRIPT [] POSTHOOK: Lineage: test_table_n9.userid SCRIPT [] -PREHOOK: query: CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float) +PREHOOK: query: CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with 
local time zone, `userid` string, `num_l` float) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") PREHOOK: type: CREATETABLE PREHOOK: Output: database:druid_test_create_then_insert PREHOOK: Output: druid_test_create_then_insert@druid_table_n1 -POSTHOOK: query: CREATE TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float) +POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_n1 (`__time` timestamp with local time zone, `userid` string, `num_l` float) STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") POSTHOOK: type: CREATETABLE @@ -335,7 +335,7 @@ POSTHOOK: Output: druid_test_dst@test_base_table POSTHOOK: Lineage: test_base_table.num_l SCRIPT [] POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT [] POSTHOOK: Lineage: test_base_table.userid SCRIPT [] -PREHOOK: query: CREATE TABLE druid_test_table_n9 +PREHOOK: query: CREATE EXTERNAL TABLE druid_test_table_n9 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS @@ -344,7 +344,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: druid_test_dst@test_base_table PREHOOK: Output: database:druid_test_dst PREHOOK: Output: druid_test_dst@druid_test_table_n9 -POSTHOOK: query: CREATE TABLE druid_test_table_n9 +POSTHOOK: query: CREATE EXTERNAL TABLE druid_test_table_n9 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' TBLPROPERTIES ("druid.segment.granularity" = "DAY") AS diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java index cbe89b6827..4bc819fc2b 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java +++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java @@ -568,6 +568,25 @@ public static boolean isExternalTable(Table table) { return isExternal(params); } + /** + * Determines whether a table needs to be purged or not. + * + * @param table table of interest + * + * @return true if external table needs to be purged + */ + public static boolean isExternalTablePurge(Table table) { + if (table == null) { + return false; + } + Map params = table.getParameters(); + if (params == null) { + return false; + } + + return isPropertyTrue(params, EXTERNAL_TABLE_PURGE); + } + public static boolean isExternal(Map tableParams){ return isPropertyTrue(tableParams, "EXTERNAL"); } -- 2.15.1 (Apple Git-101)