Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java	(revision 1202949)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java	(working copy)
@@ -431,6 +431,13 @@
       }
       return;
     }
+
+    if (cascade) {
+      List<String> tableList = getAllTables(name);
+      for (String table : tableList) {
+        dropTable(name, table, deleteData, false);
+      }
+    }
     client.drop_database(name, deleteData, cascade);
   }
 
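The hunk above routes a cascading database drop through dropTable() for each
table before issuing the Thrift drop_database call, so that any HiveMetaHook
registered for a table (for example, by a storage handler) gets its
preDropTable()/commitDropTable() callbacks; a purely server-side cascade would
bypass them. A minimal usage sketch, assuming a HiveConf and a
HiveMetaHookLoader set up as in the test further down (variable names here are
illustrative):

    HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf, hookLoader);
    // Signature: dropDatabase(name, deleteData, ignoreUnknownDb, cascade).
    // With cascade == true, every table in the database is dropped
    // client-side first, firing the hooks, before drop_database is sent.
    msc.dropDatabase("testdatabase", true, false, true);
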
Index: metastore/src/test/org/apache/hadoop/hive/metastore/DummyStorageHandler.java
===================================================================
--- metastore/src/test/org/apache/hadoop/hive/metastore/DummyStorageHandler.java	(revision 0)
+++ metastore/src/test/org/apache/hadoop/hive/metastore/DummyStorageHandler.java	(revision 0)
@@ -0,0 +1,190 @@
+package org.apache.hadoop.hive.metastore;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+
+/**
+ * A storage handler whose metastore hook materializes each table as a
+ * directory under the build tree, so tests can verify that the hook
+ * callbacks actually ran.
+ */
+class DummyStorageHandler implements HiveMetaHook, HiveStorageHandler {
+
+  static final String BASE_DIR = System.getProperty("build.dir.hive");
+  private Configuration config;
+
+  @Override
+  public Configuration getConf() {
+    return config;
+  }
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.config = conf;
+  }
+
+  @Override
+  public void configureTableJobProperties(TableDesc tableDesc,
+      Map<String, String> jobProperties) {
+  }
+
+  @Override
+  public Class<? extends InputFormat> getInputFormatClass() {
+    return HiveInputFormat.class;
+  }
+
+  @Override
+  public HiveMetaHook getMetaHook() {
+    return this;
+  }
+
+  @Override
+  public Class<? extends OutputFormat> getOutputFormatClass() {
+    return SequenceFileOutputFormat.class;
+  }
+
+  @Override
+  public Class<? extends SerDe> getSerDeClass() {
+    return DummySerDe.class;
+  }
+
+  @Override
+  public void preCreateTable(Table table) throws MetaException {
+    try {
+      createTableDir(table);
+    } catch (IOException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void rollbackCreateTable(Table table) throws MetaException {
+  }
+
+  @Override
+  public void commitCreateTable(Table table) throws MetaException {
+  }
+
+  @Override
+  public void preDropTable(Table table) throws MetaException {
+    try {
+      deleteTableDir(table);
+    } catch (IOException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  @Override
+  public void rollbackDropTable(Table table) throws MetaException {
+  }
+
+  @Override
+  public void commitDropTable(Table table, boolean deleteData)
+      throws MetaException {
+  }
+
+  private String getTableName(Table tbl) {
+    if (tbl.getDbName().equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      return tbl.getTableName();
+    }
+    return tbl.getDbName() + "." + tbl.getTableName();
+  }
+
+  private void createTableDir(Table tbl) throws IOException {
+    File tableDir = new File(getStorageHandlerDir(), getTableName(tbl));
+    if (!tableDir.mkdirs()) {
+      throw new IOException("Unable to create directory for table "
+          + getTableName(tbl));
+    }
+  }
+
+  private void deleteTableDir(Table tbl) throws IOException {
+    File tableDir = new File(getStorageHandlerDir(), getTableName(tbl));
+    if (!tableDir.delete()) {
+      throw new IOException("Unable to delete directory for table "
+          + getTableName(tbl));
+    }
+  }
+
+  static String getStorageHandlerDir() throws IOException {
+    File shDir = new File(BASE_DIR + "/metastore/test/data/storage_handler");
+    if (!shDir.exists() && !shDir.mkdirs()) {
+      throw new IOException(
+          "Unable to create base directory for storage handlers.");
+    }
+    return shDir.getCanonicalPath();
+  }
+}
+
+/**
+ * A no-op SerDe; it is never asked to read or write data in these tests.
+ */
+class DummySerDe implements SerDe {
+
+  @Override
+  public Object deserialize(Writable blob) throws SerDeException {
+    return null;
+  }
+
+  @Override
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return null;
+  }
+
+  @Override
+  public SerDeStats getSerDeStats() {
+    return null;
+  }
+
+  @Override
+  public void initialize(Configuration conf, Properties tbl)
+      throws SerDeException {
+  }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return null;
+  }
+
+  @Override
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
+    return null;
+  }
+}
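For context, the metastore client brackets each table operation with the
HiveMetaHook that the HiveMetaHookLoader resolves for the table. A simplified
sketch of that pattern, mirroring the shape of HiveMetaStoreClient.createTable
(error handling trimmed, so treat it as illustrative rather than the exact
implementation):

    HiveMetaHook hook = getHook(tbl);   // resolved via the HiveMetaHookLoader
    if (hook != null) {
      hook.preCreateTable(tbl);         // DummyStorageHandler creates its directory here
    }
    boolean success = false;
    try {
      client.create_table(tbl);         // Thrift call to the metastore server
      if (hook != null) {
        hook.commitCreateTable(tbl);
      }
      success = true;
    } finally {
      if (!success && hook != null) {
        hook.rollbackCreateTable(tbl);  // undo whatever preCreateTable did
      }
    }

dropTable() follows the same pre/commit/rollback shape, which is exactly what
the client-side cascade in the first hunk relies on, and what a server-only
cascade would skip.
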
Index: metastore/src/test/org/apache/hadoop/hive/metastore/TestDropDBCascade.java
===================================================================
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestDropDBCascade.java	(revision 0)
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestDropDBCascade.java	(revision 0)
@@ -0,0 +1,146 @@
+package org.apache.hadoop.hive.metastore;
+
+import static org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+
+/**
+ * Verifies that dropping a database with cascade invokes the storage
+ * handler's HiveMetaHook for every table in the database.
+ */
+public class TestDropDBCascade extends TestCase {
+
+  private HiveMetaStoreClient client;
+  protected HiveConf hiveConf;
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    System.setProperty(ConfVars.METASTORE_EVENT_CLEAN_FREQ.varname, "2");
+    System.setProperty(ConfVars.METASTORE_EVENT_EXPIRY_DURATION.varname, "5");
+    hiveConf = new HiveConf(this.getClass());
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+  }
+
+  public void testDropDB() throws MetaException, CommandNeedRetryException,
+      IOException, AlreadyExistsException, InvalidObjectException,
+      TException, NoSuchObjectException, InvalidOperationException {
+
+    String databaseName = "testdatabase";
+    String tableName = "testtable";
+    Database db = new Database();
+    db.setName(databaseName);
+
+    Table tbl = new Table();
+    tbl.setDbName(databaseName);
+    tbl.setTableName(tableName);
+    tbl.setTableType(TableType.EXTERNAL_TABLE.toString());
+    StorageDescriptor sd = new StorageDescriptor();
+    sd.setCols(getTableColumns());
+    tbl.setPartitionKeys(new ArrayList<FieldSchema>());
+    tbl.setSd(sd);
+    sd.setBucketCols(new ArrayList<String>(2));
+    sd.setSerdeInfo(new SerDeInfo());
+    sd.getSerdeInfo().setName(tbl.getTableName());
+    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+    sd.getSerdeInfo().getParameters()
+        .put(Constants.SERIALIZATION_FORMAT, "1");
+    sd.getSerdeInfo().setSerializationLib(
+        org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    Map<String, String> tableParams = new HashMap<String, String>();
+    tableParams.put(META_TABLE_STORAGE,
+        "org.apache.hadoop.hive.metastore.DummyStorageHandler");
+    tbl.setParameters(tableParams);
+
+    try {
+      client = new HiveMetaStoreClient(hiveConf, getHiveMetaHookLoader());
+    } catch (Throwable e) {
+      System.err.println("Unable to open the metastore");
+      System.err.println(StringUtils.stringifyException(e));
+      throw new IOException(e);
+    }
+
+    client.createDatabase(db);
+    client.createTable(tbl);
+    // preCreateTable() must have created the handler's table directory.
+    String shDir = DummyStorageHandler.getStorageHandlerDir();
+    File tableDir = new File(shDir + "/" + databaseName + "." + tableName);
+    assertTrue(tableDir.exists());
+    // deleteData = true, ignoreUnknownDb = false, cascade = true.
+    client.dropDatabase(databaseName, true, false, true);
+    // preDropTable() must have removed the directory again.
+    assertFalse(tableDir.exists());
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    // delete() only succeeds on an empty directory, i.e. once the hooks
+    // have removed the per-table directories.
+    File storageHandlerDir = new File(
+        DummyStorageHandler.getStorageHandlerDir());
+    storageHandlerDir.delete();
+    super.tearDown();
+  }
+
+  private List<FieldSchema> getTableColumns() {
+    List<FieldSchema> fields = new ArrayList<FieldSchema>();
+    fields.add(new FieldSchema("key", Constants.INT_TYPE_NAME, ""));
+    fields.add(new FieldSchema("english", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("spanish", Constants.STRING_TYPE_NAME, ""));
+    return fields;
+  }
+
+  private HiveMetaHookLoader getHiveMetaHookLoader() {
+    return new HiveMetaHookLoader() {
+      public HiveMetaHook getHook(
+          org.apache.hadoop.hive.metastore.api.Table tbl)
+          throws MetaException {
+        try {
+          if (tbl == null) {
+            return null;
+          }
+          HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(
+              hiveConf, tbl.getParameters().get(META_TABLE_STORAGE));
+          if (storageHandler == null) {
+            return null;
+          }
+          return storageHandler.getMetaHook();
+        } catch (HiveException ex) {
+          throw new MetaException("Failed to load storage handler: "
+              + ex.getMessage());
+        }
+      }
+    };
+  }
+}
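A negative check that could accompany this test (hypothetical, not part of the
patch): without cascade, the metastore is expected to reject the drop of a
non-empty database, so the hook-driven cleanup is exercised only on the
cascade path.

    // Hypothetical addition: assumes the server rejects a non-cascade drop
    // of a non-empty database with InvalidOperationException.
    try {
      client.dropDatabase(databaseName, true, false, false);
      fail("Expected drop of non-empty database without cascade to fail");
    } catch (InvalidOperationException expected) {
      // testtable still exists, so the metastore refuses the drop
    }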