diff --git hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java index 00a9fbe..9b44fa5 100644 --- hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java +++ hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java @@ -88,6 +88,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DESCDATABASE: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: + case HiveParser.TOK_ALTERDATABASE_RENAME: // Index DDL case HiveParser.TOK_ALTERINDEX_PROPERTIES: @@ -172,6 +173,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DESCDATABASE: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: + case HiveParser.TOK_ALTERDATABASE_RENAME: // Index DDL case HiveParser.TOK_ALTERINDEX_PROPERTIES: diff --git metastore/src/java/org/apache/hadoop/hive/metastore/AlterHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/AlterHandler.java index d872be5..e180448 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/AlterHandler.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/AlterHandler.java @@ -21,9 +21,11 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; @@ -102,4 +104,13 @@ public abstract Partition alterPartition(final 
RawStore msdb, Warehouse wh, fina final String dbname, final String name, final List new_part) throws InvalidOperationException, InvalidObjectException, AlreadyExistsException, MetaException; + + /** + * handles alter database + * @param msdb + * @param wh + * @param dbName + * @param db + */ + public void alterDatabase(RawStore msdb, Warehouse wh, String dbName, Database db) throws NoSuchObjectException, MetaException, InvalidOperationException; } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java index 0dfbc5a..6e2fbe9 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -421,6 +422,172 @@ public Partition alterPartition(final RawStore msdb, Warehouse wh, final String } return oldParts; } + + public void alterDatabase(RawStore msdb, Warehouse wh, String dbName, Database newDb) + throws NoSuchObjectException, MetaException, InvalidOperationException { + if (newDb == null) { + throw new InvalidOperationException("New database is invalid: " + newDb); + } + + if (!MetaStoreUtils.validateName(newDb.getName())) { + throw new InvalidOperationException(newDb.getName() + + " is not a valid object name"); + } + + Path srcDbPath = null; + FileSystem srcDbFs = null; + Path destDbPath = null; + FileSystem destDbFs = null; + + boolean success = false; + String oldDbLoc = null; + String newDbLoc = null; + boolean moveData = false; + boolean rename = false; 
+ Database oldDb = null; + try { + msdb.openTransaction(); + dbName = dbName.toLowerCase(); + + // check if database with the new name already exists + if (!newDb.getName().equalsIgnoreCase(dbName)) { + if (msdb.existsDatabase(newDb.getName())) { + throw new InvalidOperationException("new database " + newDb.getName() + + " already exists"); + } + rename = true; + } + + // get old database + oldDb = msdb.getDatabase(dbName); + if (oldDb == null) { + throw new InvalidOperationException("database " + dbName + + " doesn't exist"); + } + + newDbLoc = new Path(wh.getDatabasePath(newDb).getParent(), + newDb.getName()).toString(); + Path newDbPath = constructRenamedPath(new Path(newDbLoc), + new Path(newDb.getLocationUri())); + newDbLoc = newDbPath.toString(); + newDb.setLocationUri(newDbLoc); + oldDbLoc = oldDb.getLocationUri(); + moveData = true; + // check that destination does not exist otherwise we will be + // overwriting data + srcDbPath = new Path(oldDbLoc); + srcDbFs = wh.getFs(srcDbPath); + destDbPath = new Path(newDbLoc); + destDbFs = wh.getFs(destDbPath); + // check that src and dest are on the same file system + if (srcDbFs != destDbFs) { + throw new InvalidOperationException("database new location " + destDbPath + + " is on a different file system than the old location " + + srcDbPath + ". 
This operation is not supported"); + } + try { + srcDbFs.exists(srcDbPath); // check that src exists and also checks + // permissions necessary + if (destDbFs.exists(destDbPath)) { + throw new InvalidOperationException("New location for this database " + + newDb.getName() + " already exists : " + destDbPath); + } + } catch (IOException e) { + Warehouse.closeFs(srcDbFs); + Warehouse.closeFs(destDbFs); + throw new InvalidOperationException("Unable to access new location " + + destDbPath + " for database " + newDb.getName()); + } + + // if this alter is a rename, the table is not a virtual view, the user + // didn't change the default location (or new location is empty), and + // table is not an external table, that means user is asking metastore to + // move data to the new location corresponding to the new name + if (rename) { + for (String tableName : msdb.getAllTables(dbName)) { + Table table = msdb.getTable(dbName, tableName); + + String oldTblLoc; + String newTblLoc; + + // that means user is asking metastore to move data to new location + // corresponding to the new name + + oldTblLoc = table.getSd().getLocation(); + // get new location + newTblLoc = wh.getTablePath(newDb, tableName).toString(); + Path newTblPath = constructRenamedPath(new Path(newTblLoc), + new Path(table.getSd().getLocation())); + newTblLoc = newTblPath.toString(); + table.getSd().setLocation(newTblLoc); + + // also the location field in partition + List parts = msdb.getPartitions(dbName, tableName, -1); + for (Partition part : parts) { + String oldPartLoc = part.getSd().getLocation(); + Path oldPartLocPath = new Path(oldPartLoc); + String oldTblLocPath = new Path(oldTblLoc).toUri().getPath(); + String newTblLocPath = new Path(newTblLoc).toUri().getPath(); + if (oldPartLoc.contains(oldTblLocPath)) { + Path newPartLocPath = null; + URI oldUri = oldPartLocPath.toUri(); + String newPath = oldUri.getPath().replace(oldTblLocPath, newTblLocPath); + newPartLocPath = new Path(oldUri.getScheme(), 
oldUri.getAuthority(), newPath); + part.getSd().setLocation(newPartLocPath.toString()); + msdb.alterPartition(dbName, tableName, part.getValues(), part); + } + } + msdb.alterTable(dbName, tableName, table); + } + } + // now finally call alter database + msdb.alterDatabase(dbName, newDb); + // commit the changes + success = msdb.commitTransaction(); + } catch (InvalidObjectException e) { + LOG.debug(e); + throw new InvalidOperationException( + "Unable to alter database." + + " Check metastore logs for detailed stack." + e.getMessage()); + } catch (NoSuchObjectException e) { + LOG.debug(e); + throw new InvalidOperationException( + "Unable to alter database. Database " + dbName + " does not exist" + + " Check metastore logs for detailed stack." + e.getMessage()); + } finally { + if (!success) { + msdb.rollbackTransaction(); + } + if (success && moveData) { + // change the file name in hdfs + // check that src exists otherwise there is no need to copy the data + try { + if (srcDbFs.exists(srcDbPath)) { + // rename the src to destination + srcDbFs.rename(srcDbPath, destDbPath); + } + } catch (IOException e) { + boolean revertMetaDataTransaction = false; + try { + msdb.openTransaction(); + msdb.alterDatabase(newDb.getName(), oldDb); + revertMetaDataTransaction = msdb.commitTransaction(); + } catch (Exception e1) { + LOG.error("Reverting the metadata change failed while recovering from an HDFS rename failure", e1); + if (!revertMetaDataTransaction) { + msdb.rollbackTransaction(); + } + } + throw new InvalidOperationException("Unable to access old location " + + srcDbPath + " for database " + dbName); + } + } + } + if (!success) { + throw new MetaException("Committing the alter database transaction was not successful."); + } + + } private boolean checkPartialPartKeysEqual(List oldPartKeys, List newPartKeys) { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 591f7d5..0dcd69c 
100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -89,6 +89,7 @@ import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent; import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent; import org.apache.hadoop.hive.metastore.events.AlterTableEvent; import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent; @@ -99,6 +100,7 @@ import org.apache.hadoop.hive.metastore.events.EventCleanerTask; import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent; import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.PreAlterDatabaseEvent; import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent; import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent; import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent; @@ -641,14 +643,22 @@ public Database get_database(final String name) throws NoSuchObjectException, return db; } - public void alter_database(final String dbName, final Database db) + public void alter_database(final String dbName, final Database newDatabase) throws NoSuchObjectException, TException, MetaException { - startFunction("alter_database" + dbName); + startFunction("alter_database", ": " + dbName); boolean success = false; Exception ex = null; try { - getMS().alterDatabase(dbName, db); + Database oldDatabase = get_database(dbName); + firePreEvent(new PreAlterDatabaseEvent(oldDatabase, newDatabase, this)); + alterHandler.alterDatabase(getMS(), wh, dbName, newDatabase); success = true; + + for (MetaStoreEventListener listener : listeners) { + AlterDatabaseEvent alterDatabaseEvent = + new AlterDatabaseEvent(oldDatabase, 
newDatabase, success, this); + listener.onAlterDatabase(alterDatabaseEvent); + } } catch (Exception e) { ex = e; if (e instanceof MetaException) { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java index c28c46a..cd766fd 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java @@ -22,6 +22,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent; import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent; import org.apache.hadoop.hive.metastore.events.AlterTableEvent; import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent; @@ -112,6 +113,9 @@ public void onLoadPartitionDone(LoadPartitionDoneEvent partSetDoneEvent) throws } + public void onAlterDatabase (AlterDatabaseEvent databaseEvent) throws MetaException { + } + @Override public Configuration getConf() { return this.conf; diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 15a2a81..ac92b3d 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -335,7 +335,7 @@ static public void deleteWHDirectory(Path path, Configuration conf, /** * validateName * - * Checks the name conforms to our standars which are: "[a-zA-z_0-9]+". checks + * Checks the name conforms to our standards which are: "[a-zA-z_0-9]+". 
checks * this is just characters and numbers and _ * * @param name diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 6ca3f22..daaabea 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -450,6 +450,18 @@ public Database getDatabase(String name) throws NoSuchObjectException { db.setParameters(mdb.getParameters()); return db; } + + public boolean existsDatabase(String name) { + try { + openTransaction(); + getMDatabase(name); + commitTransaction(); + return true; + } catch (NoSuchObjectException e) { + commitTransaction(); + return false; + } + } /** * Alter the database object in metastore. Currently only the parameters @@ -466,8 +478,8 @@ public boolean alterDatabase(String dbName, Database db) boolean committed = false; try { mdb = getMDatabase(dbName); - // currently only allow changing database parameters mdb.setParameters(db.getParameters()); + mdb.setName(db.getName()); openTransaction(); pm.makePersistent(mdb); committed = commitTransaction(); diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java index e410c3a..3fc0525 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java @@ -417,7 +417,7 @@ public abstract boolean deleteTableColumnStatistics(String dbName, String tableN String colName) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException; - public abstract long cleanupEvents(); + public abstract long cleanupEvents(); public abstract boolean addToken(String tokenIdentifier, String delegationToken); @@ -436,4 +436,5 @@ public abstract void updateMasterKey(Integer seqNo, String key) public abstract String[] getMasterKeys(); + public boolean 
existsDatabase(String name); } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java metastore/src/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java new file mode 100644 index 0000000..a6306f5 --- /dev/null +++ metastore/src/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java @@ -0,0 +1,24 @@ +package org.apache.hadoop.hive.metastore.events; + +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.api.Database; + +public class AlterDatabaseEvent extends ListenerEvent { + + private final Database oldDatabase; + private final Database newDatabase; + + public AlterDatabaseEvent(Database oldDatabase, Database newDatabase, boolean status, HiveMetaStore.HMSHandler handler) { + super(status, handler); + this.oldDatabase = oldDatabase; + this.newDatabase = newDatabase; + } + + public Database getOldDatabase() { + return oldDatabase; + } + + public Database getNewDatabase() { + return newDatabase; + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAlterDatabaseEvent.java metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAlterDatabaseEvent.java new file mode 100644 index 0000000..9988fe5 --- /dev/null +++ metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAlterDatabaseEvent.java @@ -0,0 +1,24 @@ +package org.apache.hadoop.hive.metastore.events; + +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.api.Database; + +public class PreAlterDatabaseEvent extends PreEventContext { + + private final Database oldDatabase; + private final Database newDatabase; + + public PreAlterDatabaseEvent(Database oldDatabase, Database newDatabase, HiveMetaStore.HMSHandler handler) { + super(PreEventType.ALTER_DATABASE, handler); + this.oldDatabase = oldDatabase; + this.newDatabase = newDatabase; + } + + public Database getOldDatabase() { + return oldDatabase; + } + + public Database 
getNewDatabase() { + return newDatabase; + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java index 5021a73..5a89a75 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java @@ -36,7 +36,8 @@ ALTER_PARTITION, CREATE_DATABASE, DROP_DATABASE, - LOAD_PARTITION_DONE + LOAD_PARTITION_DONE, + ALTER_DATABASE } private final PreEventType eventType; diff --git metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java index 8066784..1e39bce 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java @@ -484,6 +484,11 @@ public long cleanupEvents() { } @Override + public boolean existsDatabase(String name) { + return objectStore.existsDatabase(name); + } + + @Override public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName) throws MetaException, NoSuchObjectException, InvalidInputException { diff --git metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java index 0f9b16c..7acaf88 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java @@ -554,6 +554,12 @@ public boolean removeMasterKey(Integer keySeq) { } @Override + public boolean existsDatabase(String name) { + + return false; + } + + @Override public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName) throws 
MetaException, NoSuchObjectException { return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 4dcb260..01b602c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -852,24 +852,33 @@ private int roleDDL(RoleDDLDesc roleDDLDesc) { } private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException { + String oldName = alterDbDesc.getOldName(); + Database database = db.getDatabase(oldName); + if (database == null) { + throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, oldName); + } - String dbName = alterDbDesc.getDatabaseName(); - Database database = db.getDatabase(dbName); - Map newParams = alterDbDesc.getDatabaseProperties(); - - if (database != null) { - Map params = database.getParameters(); - // if both old and new params are not null, merge them - if (params != null && newParams != null) { - params.putAll(newParams); - database.setParameters(params); - } else { // if one of them is null, replace the old params with the new one - database.setParameters(newParams); - } - db.alterDatabase(database.getName(), database); - } else { - throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); + switch (alterDbDesc.getOp()) { + case ADDPROPS: + Map newParams = alterDbDesc.getDatabaseProperties(); + Map params = database.getParameters(); + // if both old and new params are not null, merge them + if (params != null && newParams != null) { + params.putAll(newParams); + database.setParameters(params); + } else { // if one of them is null, replace the old params with the new one + database.setParameters(newParams); + } + break; + + case RENAME: + database.setName(alterDbDesc.getNewName()); + break; + + default: + throw new HiveException("ERROR: The operation " + alterDbDesc.getOp().name() + " does not exist."); } + db.alterDatabase(oldName, database); return 0; } diff --git 
ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 932ec66..4e7c9c9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -376,7 +376,10 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { analyzeSwitchDatabase(ast); break; case HiveParser.TOK_ALTERDATABASE_PROPERTIES: - analyzeAlterDatabase(ast); + analyzeAlterDatabaseProps(ast); + break; + case HiveParser.TOK_ALTERDATABASE_RENAME: + analyzeAlterDatabaseRename(ast); break; case HiveParser.TOK_CREATEROLE: analyzeCreateRole(ast); @@ -644,8 +647,7 @@ private void analyzeShowRoleGrant(ASTNode ast) { createRoleDesc), conf)); } - private void analyzeAlterDatabase(ASTNode ast) throws SemanticException { - + private void analyzeAlterDatabaseProps(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); Map dbProps = null; @@ -660,12 +662,18 @@ private void analyzeAlterDatabase(ASTNode ast) throws SemanticException { } } - // currently alter database command can only change properties AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbName, null, null, false); alterDesc.setDatabaseProperties(dbProps); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc), conf)); + } + private void analyzeAlterDatabaseRename(ASTNode ast) throws SemanticException { + String oldName = unescapeIdentifier(ast.getChild(0).getText()); + String newName = unescapeIdentifier(ast.getChild(1).getText()); + AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(oldName, newName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc), + conf)); } private void analyzeExchangePartition(ASTNode ast) throws SemanticException { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 36d62a6..a58a763 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -277,6 +277,7 @@ TOK_DATABASEPROPERTIES; TOK_DATABASELOCATION; TOK_DBPROPLIST; TOK_ALTERDATABASE_PROPERTIES; +TOK_ALTERDATABASE_RENAME; TOK_ALTERTABLE_ALTERPARTS_MERGEFILES; TOK_TABNAME; TOK_TABSRC; @@ -910,7 +911,15 @@ alterIndexStatementSuffix alterDatabaseStatementSuffix @init { msgs.push("alter database statement"); } @after { msgs.pop(); } - : alterDatabaseSuffixProperties + : alterDatabaseSuffixRename + | alterDatabaseSuffixProperties + ; + +alterDatabaseSuffixRename +@init { msgs.push("rename database statement"); } +@after { msgs.pop(); } + : oldName=identifier KW_RENAME KW_TO newName=identifier + -> ^(TOK_ALTERDATABASE_RENAME $oldName $newName) ; alterDatabaseSuffixProperties diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 97454e4..846e31f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -93,7 +93,8 @@ commandType.put(HiveParser.TOK_GRANT_ROLE, HiveOperation.GRANT_ROLE); commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE); commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT); - commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE); + commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE_PROPS); + commandType.put(HiveParser.TOK_ALTERDATABASE_RENAME, HiveOperation.ALTERDATABASE_RENAME); commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE); commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED); commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE); @@ 
-203,6 +204,7 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_REVOKE_ROLE: case HiveParser.TOK_SHOW_ROLE_GRANT: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: + case HiveParser.TOK_ALTERDATABASE_RENAME: case HiveParser.TOK_ALTERTABLE_SKEWED: case HiveParser.TOK_TRUNCATETABLE: case HiveParser.TOK_EXCHANGEPARTITION: diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java index 7e081d2..42e8536 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java @@ -25,12 +25,18 @@ * AlterDatabaseDesc. * */ -@Explain(displayName = "Create Database") +@Explain(displayName = "Alter Database") public class AlterDatabaseDesc extends DDLDesc implements Serializable { private static final long serialVersionUID = 1L; - String databaseName; + public static enum AlterDatabaseTypes { + RENAME, ADDPROPS + } + + AlterDatabaseTypes op; + String oldName; + String newName; String locationUri; String comment; boolean ifNotExists; @@ -42,10 +48,11 @@ public AlterDatabaseDesc() { } - public AlterDatabaseDesc(String databaseName, String comment, + public AlterDatabaseDesc(String oldName, String comment, String locationUri, boolean ifNotExists) { super(); - this.databaseName = databaseName; + op = AlterDatabaseTypes.ADDPROPS; + this.oldName = oldName; this.comment = comment; this.locationUri = locationUri; this.ifNotExists = ifNotExists; @@ -55,8 +62,12 @@ public AlterDatabaseDesc(String databaseName, String comment, public AlterDatabaseDesc(String databaseName, boolean ifNotExists) { this(databaseName, null, null, ifNotExists); } - - + + public AlterDatabaseDesc(String oldName, String newName) { + op = AlterDatabaseTypes.RENAME; + this.oldName = oldName; + this.newName = newName; + } @Explain(displayName="if not exists") public boolean getIfNotExists() { @@ -76,12 +87,12 @@ public 
void setDatabaseProperties(Map dbProps) { } @Explain(displayName="name") - public String getDatabaseName() { - return databaseName; + public String getOldName() { + return oldName; } - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; + public void setOldName(String oldName) { + this.oldName = oldName; } @Explain(displayName="comment") @@ -101,4 +112,20 @@ public String getLocationUri() { public void setLocationUri(String locationUri) { this.locationUri = locationUri; } + + public AlterDatabaseTypes getOp() { + return op; + } + + public void setOp(AlterDatabaseTypes op) { + this.op = op; + } + + public String getNewName() { + return newName; + } + + public void setNewName(String newName) { + this.newName = newName; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java index 69cda05..8a72d2d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java @@ -96,7 +96,7 @@ CREATETABLE_AS_SELECT("CREATETABLE_AS_SELECT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.CREATE}), QUERY("QUERY", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA, Privilege.CREATE}), ALTERINDEX_PROPS("ALTERINDEX_PROPS",null, null), - ALTERDATABASE("ALTERDATABASE", null, null), + ALTERDATABASE_PROPS("ALTERDATABASE_PROPS", null, null), DESCDATABASE("DESCDATABASE", null, null), ALTERTABLE_MERGEFILES("ALTER_TABLE_MERGE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }), ALTERPARTITION_MERGEFILES("ALTER_PARTITION_MERGE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }), @@ -104,6 +104,7 @@ ALTERTBLPART_SKEWED_LOCATION("ALTERTBLPART_SKEWED_LOCATION", new Privilege[] {Privilege.ALTER_DATA}, null), ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null), + 
ALTERDATABASE_RENAME("ALTERDATABASE_RENAME", null, null), ; private String operationName; diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java index 9a90549..7831230 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.PreAlterDatabaseEvent; import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent; import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent; import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent; @@ -104,6 +105,9 @@ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectE case DROP_DATABASE: authorizeDropDatabase((PreDropDatabaseEvent)context); break; + case ALTER_DATABASE: + authorizeAlterDatabase((PreAlterDatabaseEvent)context); + break; case LOAD_PARTITION_DONE: // noop for now break; @@ -139,6 +143,19 @@ private void authorizeDropDatabase(PreDropDatabaseEvent context) } } + private void authorizeAlterDatabase(PreAlterDatabaseEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(new Database(context.getOldDatabase()), + null, + new Privilege[]{Privilege.ALTER_METADATA}); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + private void authorizeCreateTable(PreCreateTableEvent context) throws InvalidOperationException, MetaException { try { diff --git 
ql/src/test/queries/clientpositive/alter_rename_database1.q ql/src/test/queries/clientpositive/alter_rename_database1.q new file mode 100644 index 0000000..f693e89 --- /dev/null +++ ql/src/test/queries/clientpositive/alter_rename_database1.q @@ -0,0 +1,12 @@ +SHOW DATABASES; + +CREATE DATABASE alter_db; +USE alter_db; +SHOW DATABASES; + +ALTER DATABASE alter_db RENAME TO new_alter_db; +SHOW DATABASES; + +USE default; +DROP DATABASE new_alter_db; +SHOW DATABASES; diff --git ql/src/test/queries/clientpositive/alter_rename_database2.q ql/src/test/queries/clientpositive/alter_rename_database2.q new file mode 100644 index 0000000..e5b6572 --- /dev/null +++ ql/src/test/queries/clientpositive/alter_rename_database2.q @@ -0,0 +1,17 @@ +CREATE DATABASE alter_db; + +CREATE TABLE alter_db.alter1 (col1 string); +CREATE TABLE alter_db.alter2 (col1 string); +LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter_db.alter1; +INSERT INTO TABLE alter_db.alter2 SELECT * FROM alter_db.alter1; + +ALTER DATABASE alter_db RENAME TO new_alter_db; +CREATE DATABASE alter_db; + +CREATE TABLE alter_db.alter2 (col1 string); +INSERT INTO TABLE alter_db.alter2 SELECT * FROM new_alter_db.alter1; +SELECT * FROM alter_db.alter2; +SELECT * FROM new_alter_db.alter2; + +DROP DATABASE alter_db CASCADE; +DROP DATABASE new_alter_db CASCADE; diff --git ql/src/test/results/clientpositive/alter_rename_database1.q.out ql/src/test/results/clientpositive/alter_rename_database1.q.out new file mode 100644 index 0000000..71cb827 --- /dev/null +++ ql/src/test/results/clientpositive/alter_rename_database1.q.out @@ -0,0 +1,42 @@ +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +default +PREHOOK: query: CREATE DATABASE alter_db +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: CREATE DATABASE alter_db +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: USE alter_db +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: USE 
alter_db +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +alter_db +default +PREHOOK: query: ALTER DATABASE alter_db RENAME TO new_alter_db +PREHOOK: type: ALTERDATABASE_RENAME +POSTHOOK: query: ALTER DATABASE alter_db RENAME TO new_alter_db +POSTHOOK: type: ALTERDATABASE_RENAME +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +default +new_alter_db +PREHOOK: query: USE default +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: USE default +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: DROP DATABASE new_alter_db +PREHOOK: type: DROPDATABASE +POSTHOOK: query: DROP DATABASE new_alter_db +POSTHOOK: type: DROPDATABASE +PREHOOK: query: SHOW DATABASES +PREHOOK: type: SHOWDATABASES +POSTHOOK: query: SHOW DATABASES +POSTHOOK: type: SHOWDATABASES +default diff --git ql/src/test/results/clientpositive/alter_rename_database2.q.out ql/src/test/results/clientpositive/alter_rename_database2.q.out new file mode 100644 index 0000000..afc68ca --- /dev/null +++ ql/src/test/results/clientpositive/alter_rename_database2.q.out @@ -0,0 +1,99 @@ +PREHOOK: query: CREATE DATABASE alter_db +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: CREATE DATABASE alter_db +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: CREATE TABLE alter_db.alter1 (col1 string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE alter_db.alter1 (col1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: alter_db@alter1 +PREHOOK: query: CREATE TABLE alter_db.alter2 (col1 string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE alter_db.alter2 (col1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: alter_db@alter2 +PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter_db.alter1 +PREHOOK: type: LOAD +PREHOOK: Output: alter_db@alter1 +POSTHOOK: query: LOAD DATA LOCAL INPATH 
'../data/files/test.dat' OVERWRITE INTO TABLE alter_db.alter1 +POSTHOOK: type: LOAD +POSTHOOK: Output: alter_db@alter1 +PREHOOK: query: INSERT INTO TABLE alter_db.alter2 SELECT * FROM alter_db.alter1 +PREHOOK: type: QUERY +PREHOOK: Input: alter_db@alter1 +PREHOOK: Output: alter_db@alter2 +POSTHOOK: query: INSERT INTO TABLE alter_db.alter2 SELECT * FROM alter_db.alter1 +POSTHOOK: type: QUERY +POSTHOOK: Input: alter_db@alter1 +POSTHOOK: Output: alter_db@alter2 +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: ALTER DATABASE alter_db RENAME TO new_alter_db +PREHOOK: type: ALTERDATABASE_RENAME +POSTHOOK: query: ALTER DATABASE alter_db RENAME TO new_alter_db +POSTHOOK: type: ALTERDATABASE_RENAME +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: CREATE DATABASE alter_db +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: CREATE DATABASE alter_db +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE alter_db.alter2 (col1 string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: CREATE TABLE alter_db.alter2 (col1 string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: alter_db@alter2 +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: INSERT INTO TABLE alter_db.alter2 SELECT * FROM new_alter_db.alter1 +PREHOOK: type: QUERY +PREHOOK: Input: new_alter_db@alter1 +PREHOOK: Output: alter_db@alter2 +POSTHOOK: query: INSERT INTO TABLE alter_db.alter2 SELECT * FROM new_alter_db.alter1 +POSTHOOK: type: QUERY +POSTHOOK: Input: new_alter_db@alter1 +POSTHOOK: Output: alter_db@alter2 +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: alter2.col1 SIMPLE 
[(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: SELECT * FROM alter_db.alter2 +PREHOOK: type: QUERY +PREHOOK: Input: alter_db@alter2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM alter_db.alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: alter_db@alter2 +#### A masked pattern was here #### +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +1 +2 +3 +4 +5 +6 +PREHOOK: query: SELECT * FROM new_alter_db.alter2 +PREHOOK: type: QUERY +PREHOOK: Input: new_alter_db@alter2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * FROM new_alter_db.alter2 +POSTHOOK: type: QUERY +POSTHOOK: Input: new_alter_db@alter2 +#### A masked pattern was here #### +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +1 +2 +3 +4 +5 +6 +PREHOOK: query: DROP DATABASE alter_db CASCADE +PREHOOK: type: DROPDATABASE +POSTHOOK: query: DROP DATABASE alter_db CASCADE +POSTHOOK: type: DROPDATABASE +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +PREHOOK: query: DROP DATABASE new_alter_db CASCADE +PREHOOK: type: DROPDATABASE +POSTHOOK: query: DROP DATABASE new_alter_db CASCADE +POSTHOOK: type: DROPDATABASE +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] +POSTHOOK: Lineage: alter2.col1 SIMPLE [(alter1)alter1.FieldSchema(name:col1, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/database_properties.q.out ql/src/test/results/clientpositive/database_properties.q.out 
index e3e2816..c0c5b47 100644 --- ql/src/test/results/clientpositive/database_properties.q.out +++ ql/src/test/results/clientpositive/database_properties.q.out @@ -31,11 +31,11 @@ POSTHOOK: type: DESCDATABASE PREHOOK: query: alter database db2 set dbproperties ( 'new.property' = 'some new props', 'hive.warehouse.dir' = 'new/warehouse/dir') -PREHOOK: type: ALTERDATABASE +PREHOOK: type: ALTERDATABASE_PROPS POSTHOOK: query: alter database db2 set dbproperties ( 'new.property' = 'some new props', 'hive.warehouse.dir' = 'new/warehouse/dir') -POSTHOOK: type: ALTERDATABASE +POSTHOOK: type: ALTERDATABASE_PROPS PREHOOK: query: describe database extended db2 PREHOOK: type: DESCDATABASE POSTHOOK: query: describe database extended db2