diff --git a/ql/src/test/queries/clientnegative/drop_table_used_by_mv.q b/ql/src/test/queries/clientnegative/drop_table_used_by_mv.q
new file mode 100644
index 0000000000..e5949a4f9a
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/drop_table_used_by_mv.q
@@ -0,0 +1,8 @@
+create table mytable (key int, value string);
+insert into mytable values (1, 'val1'), (2, 'val2');
+
+create materialized view mv1 as
+select key, value from mytable;
+
+drop table mytable;
+
diff --git a/ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out b/ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out
new file mode 100644
index 0000000000..5a9cabe7a1
--- /dev/null
+++ b/ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out
@@ -0,0 +1,35 @@
+PREHOOK: query: create table mytable (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mytable
+POSTHOOK: query: create table mytable (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mytable
+PREHOOK: query: insert into mytable values (1, 'val1'), (2, 'val2')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@mytable
+POSTHOOK: query: insert into mytable values (1, 'val1'), (2, 'val2')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@mytable
+POSTHOOK: Lineage: mytable.key SCRIPT []
+POSTHOOK: Lineage: mytable.value SCRIPT []
+PREHOOK: query: create materialized view mv1 as
+select key, value from mytable
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@mytable
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mv1
+POSTHOOK: query: create materialized view mv1 as
+select key, value from mytable
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@mytable
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@mv1
+PREHOOK: query: drop table mytable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@mytable
+PREHOOK: Output: default@mytable
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Exception thrown flushing changes to datastore)
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 84fac2dfa4..44b518e495 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -1141,6 +1141,31 @@ private void drop_database_core(RawStore ms,
         drop_function(name, funcName);
       }
 
+      // First pass will drop the materialized views
+      List<Table> materializedViews;
+      try {
+        materializedViews = ms.getTableObjectsByName(name,
+            get_tables_by_type(name, ".*", TableType.MATERIALIZED_VIEW.toString()));
+      } catch (UnknownDBException e) {
+        throw new MetaException(e.getMessage());
+      }
+      for (Table materializedView : materializedViews) {
+        if (materializedView.getSd().getLocation() != null) {
+          Path materializedViewPath = wh.getDnsPath(new Path(materializedView.getSd().getLocation()));
+          if (!wh.isWritable(materializedViewPath.getParent())) {
+            throw new MetaException("Database metadata not deleted since table: " +
+                materializedView.getTableName() + " has a parent location " + materializedViewPath.getParent() +
+                " which is not writable by " + SecurityUtils.getUser());
+          }
+
+          if (!isSubdirectory(databasePath, materializedViewPath)) {
+            tablePaths.add(materializedViewPath);
+          }
+        }
+        // Drop the table but not its data
+        drop_table(name, materializedView.getTableName(), false);
+      }
+
       // drop tables before dropping db
       int tableBatchSize = MetastoreConf.getIntVar(conf,
           ConfVars.BATCH_RETRIEVE_MAX);
@@ -1159,7 +1184,6 @@ private void drop_database_core(RawStore ms,
 
       if (tables != null && !tables.isEmpty()) {
         for (Table table : tables) {
-
           // If the table is not external and it might not be in a subdirectory of the database
           // add it's locations to the list of paths to delete
           Path tablePath = null;
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 0e561f82ff..0ce71f724a 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -908,15 +908,24 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD
     }
 
     if (cascade) {
-      List<String> tableList = getAllTables(name);
-      for (String table : tableList) {
-        try {
-          // Subclasses can override this step (for example, for temporary tables)
-          dropTable(name, table, deleteData, true);
-        } catch (UnsupportedOperationException e) {
-          // Ignore Index tables, those will be dropped with parent tables
-        }
+      // Note that this logic may drop some of the tables of the database
+      // even if the drop database fail for any reason
+      // TODO: Fix this
+      List<String> materializedViews = getTables(name, "*", TableType.MATERIALIZED_VIEW);
+      for (String table : materializedViews) {
+        // First we delete the materialized views
+        dropTable(name, table, deleteData, true);
+      }
+      List<String> tableList = getAllTables(name);
+      for (String table : tableList) {
+        // Now we delete the rest of tables
+        try {
+          // Subclasses can override this step (for example, for temporary tables)
+          dropTable(name, table, deleteData, true);
+        } catch (UnsupportedOperationException e) {
+          // Ignore Index tables, those will be dropped with parent tables
+        }
        }
+      }
     }
     client.drop_database(name, deleteData, cascade);
   }
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 88d88ed4df..d7dfc1c96b 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -1655,7 +1655,13 @@ private MTable getMTable(String db, String table) {
         query.declareParameters("java.lang.String db, java.util.Collection tbl_names");
         Collection mtables = (Collection) query.execute(db, lowered_tbl_names);
         for (Iterator iter = mtables.iterator(); iter.hasNext();) {
-          tables.add(convertToTable((MTable) iter.next()));
+          Table tbl = convertToTable((MTable) iter.next());
+          // Retrieve creation metadata if needed
+          if (TableType.MATERIALIZED_VIEW.toString().equals(tbl.getTableType())) {
+            tbl.setCreationMetadata(
+                convertToCreationMetadata(getCreationMetadata(tbl.getDbName(), tbl.getTableName())));
+          }
+          tables.add(tbl);
         }
         committed = commitTransaction();
       } finally {
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/TableBuilder.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/TableBuilder.java
index 2b9f816960..f229513f37 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/TableBuilder.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/TableBuilder.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.metastore.client.builder;
 
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.CreationMetadata;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -37,6 +38,7 @@
  */
 public class TableBuilder extends StorageDescriptorBuilder<TableBuilder> {
   private String dbName, tableName, owner, viewOriginalText, viewExpandedText, type;
+  private CreationMetadata cm;
   private List<FieldSchema> partCols;
   private int createTime, lastAccessTime, retention;
   private Map<String, String> tableParams;
@@ -87,6 +89,11 @@ public TableBuilder setType(String type) {
     return this;
   }
 
+  public TableBuilder setCreationMetadata(CreationMetadata cm) {
+    this.cm = cm;
+    return this;
+  }
+
   public TableBuilder setPartCols(List<FieldSchema> partCols) {
     this.partCols = partCols;
     return this;
@@ -155,6 +162,9 @@ public Table build() throws MetaException {
     if (rewriteEnabled) {
       t.setRewriteEnabled(true);
     }
+    if (cm != null) {
+      t.setCreationMetadata(cm);
+    }
     if (temporary) {
       t.setTemporary(temporary);
     }
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 7091c5b2f5..53c83ba803 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -39,6 +39,8 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.collect.Sets;
+import org.apache.hadoop.hive.metastore.api.CreationMetadata;
 import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
 import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
@@ -2726,6 +2728,19 @@ private void createTable(String dbName, String tableName) throws TException {
     client.createTable(t);
   }
 
+  private void createMaterializedView(String dbName, String tableName, Set<String> tablesUsed)
+      throws TException {
+    Table t = new TableBuilder()
+        .setDbName(dbName)
+        .setTableName(tableName)
+        .setType(TableType.MATERIALIZED_VIEW.name())
+        .setCreationMetadata(new CreationMetadata(dbName, tableName, tablesUsed))
+        .addCol("foo", "string")
+        .addCol("bar", "string")
+        .build();
+    client.createTable(t);
+  }
+
   private List<Partition> createPartitions(String dbName, Table tbl,
       List<List<String>> values) throws Throwable {
     int i = 1;
@@ -2833,6 +2848,7 @@ public void testGetTableObjects() throws Exception {
     for (String tableName : tableNames) {
       createTable(dbName, tableName);
     }
+    createMaterializedView(dbName, "mv1", Sets.newHashSet("db.table1", "db.table2"));
 
     // Test
     List<Table> tableObjs = client.getTableObjectsByName(dbName, tableNames);
@@ -2847,6 +2863,44 @@ public void testGetTableObjects() throws Exception {
     client.dropDatabase(dbName, true, true, true);
   }
 
+  @Test
+  public void testDropDatabaseCascadeMVMultiDB() throws Exception {
+    String dbName1 = "db1";
+    String tableName1 = "table1";
+    String dbName2 = "db2";
+    String tableName2 = "table2";
+    String mvName = "mv1";
+
+    // Setup
+    silentDropDatabase(dbName1);
+    silentDropDatabase(dbName2);
+
+    Database db1 = new Database();
+    db1.setName(dbName1);
+    client.createDatabase(db1);
+    createTable(dbName1, tableName1);
+    Database db2 = new Database();
+    db2.setName(dbName2);
+    client.createDatabase(db2);
+    createTable(dbName2, tableName2);
+
+    createMaterializedView(dbName2, mvName, Sets.newHashSet("db1.table1", "db2.table2"));
+
+    boolean exceptionFound = false;
+    try {
+      // Cannot drop db1 because mv1 uses one of its tables
+      // TODO: Error message coming from metastore is currently not very concise
+      // (foreign key violation), we should make it easily understandable
+      client.dropDatabase(dbName1, true, true, true);
+    } catch (Exception e) {
+      exceptionFound = true;
+    }
+    assertTrue(exceptionFound);
+
+    client.dropDatabase(dbName2, true, true, true);
+    client.dropDatabase(dbName1, true, true, true);
+  }
+
   @Test
   public void testDBLocationChange() throws IOException, TException {
     final String dbName = "alterDbLocation";
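For reference, a minimal follow-up sketch (not part of the patch, assuming the same default database used in drop_table_used_by_mv.q): with the dependency tracking above, the base table in the negative test can only be dropped once the dependent materialized view has been removed first, e.g.

drop materialized view mv1;
drop table mytable;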