diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java index 4d2f3d8..a956c82 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java @@ -23,6 +23,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.ASTNode; @@ -30,8 +33,6 @@ import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hive.hcatalog.common.HCatConstants; @@ -84,12 +85,13 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context, } @Override - protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context, - Hive hive, DDLWork work) throws HiveException { - CreateDatabaseDesc createDb = work.getCreateDatabaseDesc(); - if (createDb != null) { + protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork2 work) + throws HiveException { + DDLDesc ddlDesc = work.getDDLDesc(); + if (ddlDesc instanceof CreateDatabaseDesc) { + CreateDatabaseDesc createDb = (CreateDatabaseDesc)ddlDesc; Database db = new Database(createDb.getName(), createDb.getComment(), - 
createDb.getLocationUri(), createDb.getDatabaseProperties()); + createDb.getLocationUri(), createDb.getDatabaseProperties()); authorize(db, Privilege.CREATE); } } diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java index 8105e8b..1002db7 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java @@ -21,6 +21,12 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.DescDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.DropDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.ShowDatabasesDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -32,16 +38,12 @@ import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DescTableDesc; -import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DropTableDesc; import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc; import 
org.apache.hadoop.hive.ql.plan.ShowTablesDesc; -import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.hcatalog.common.ErrorType; @@ -273,37 +275,35 @@ private String extractTableName(String compoundName) { } @Override - protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) - throws HiveException { - // DB opereations, none of them are enforced by Hive right now. - - ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc(); - if (showDatabases != null) { + protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork2 work) + throws HiveException { + DDLDesc ddlDesc = work.getDDLDesc(); + if (ddlDesc instanceof ShowDatabasesDesc) { authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(), - HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges()); - } - - DropDatabaseDesc dropDb = work.getDropDatabaseDesc(); - if (dropDb != null) { + HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges()); + } else if (ddlDesc instanceof DropDatabaseDesc) { + DropDatabaseDesc dropDb = (DropDatabaseDesc)ddlDesc; Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName()); if (db != null){ // if above returned a null, then the db does not exist - probably a // "drop database if exists" clause - don't try to authorize then. 
authorize(db, Privilege.DROP); } - } - - DescDatabaseDesc descDb = work.getDescDatabaseDesc(); - if (descDb != null) { + } else if (ddlDesc instanceof DescDatabaseDesc) { + DescDatabaseDesc descDb = (DescDatabaseDesc)ddlDesc; Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName()); authorize(db, Privilege.SELECT); - } - - SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc(); - if (switchDb != null) { + } else if (ddlDesc instanceof SwitchDatabaseDesc) { + SwitchDatabaseDesc switchDb = (SwitchDatabaseDesc)ddlDesc; Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName()); authorize(db, Privilege.SELECT); } + } + + @Override + protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) + throws HiveException { + // DB opereations, none of them are enforced by Hive right now. ShowTablesDesc showTables = work.getShowTblsDesc(); if (showTables != null) { diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java index e359d53..855094f 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; import org.apache.hadoop.hive.ql.metadata.AuthorizationException; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -102,6 +103,11 @@ protected void authorizeDDL(HiveSemanticAnalyzerHookContext context, if (work != null) { authorizeDDLWork(context, hive, work); } + } else if (task.getWork() instanceof DDLWork2) { + DDLWork2 work = (DDLWork2) task.getWork(); + if (work 
!= null) { + authorizeDDLWork2(context, hive, work); + } } } } catch (SemanticException ex) { @@ -122,6 +128,13 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context, Hive hive, DDLWork work) throws HiveException { } + /** + * Authorized the given DDLWork2. It is only for the interim time while DDLTask and DDLWork are being refactored. + */ + protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext context, + Hive hive, DDLWork2 work) throws HiveException { + } + protected void authorize(Privilege[] inputPrivs, Privilege[] outputPrivs) throws AuthorizationException, SemanticException { try { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java index 3639ab1..d265234 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java @@ -935,7 +935,7 @@ public void testIncrementalDumpEmptyDumpDirectory() throws Throwable { CommandProcessorResponse response = replica.runCommand("REPL LOAD someJunkDB from '" + tuple.dumpLocation + "'"); assertTrue(response.getErrorMessage().toLowerCase() - .contains("org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: someJunkDB" + .contains("org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. Database does not exist: someJunkDB" .toLowerCase())); // Bootstrap load from an empty dump directory should return empty load directory error. 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index b02cdf8..e7fd687 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -79,7 +79,6 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.AggrStats; -import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.CompactionResponse; @@ -177,7 +176,6 @@ import org.apache.hadoop.hive.ql.parse.repl.dump.Utils; import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc; @@ -188,7 +186,6 @@ import org.apache.hadoop.hive.ql.plan.AlterWMTriggerDesc; import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc; import org.apache.hadoop.hive.ql.plan.ColStatistics; -import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMMappingDesc; import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc; import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc; @@ -198,10 +195,8 @@ import org.apache.hadoop.hive.ql.plan.CreateViewDesc; import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; import org.apache.hadoop.hive.ql.plan.DescTableDesc; -import 
org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.DropTableDesc; import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc; @@ -214,7 +209,6 @@ import org.apache.hadoop.hive.ql.plan.KillQueryDesc; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; import org.apache.hadoop.hive.ql.plan.LoadMultiFilesDesc; -import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.LockTableDesc; import org.apache.hadoop.hive.ql.plan.MoveWork; import org.apache.hadoop.hive.ql.plan.MsckDesc; @@ -232,7 +226,6 @@ import org.apache.hadoop.hive.ql.plan.ShowConfDesc; import org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc; -import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; @@ -242,10 +235,8 @@ import org.apache.hadoop.hive.ql.plan.ShowTablesDesc; import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc; import org.apache.hadoop.hive.ql.plan.ShowTxnsDesc; -import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.TruncateTableDesc; -import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; @@ -351,41 +342,6 @@ public int execute(DriverContext driverContext) { try { db = Hive.get(conf); - CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc(); - if (null != createDatabaseDesc) { - return createDatabase(db, createDatabaseDesc); - } - - DropDatabaseDesc dropDatabaseDesc = work.getDropDatabaseDesc(); - if (dropDatabaseDesc != null) { - return dropDatabase(db, 
dropDatabaseDesc); - } - - LockDatabaseDesc lockDatabaseDesc = work.getLockDatabaseDesc(); - if (lockDatabaseDesc != null) { - return lockDatabase(db, lockDatabaseDesc); - } - - UnlockDatabaseDesc unlockDatabaseDesc = work.getUnlockDatabaseDesc(); - if (unlockDatabaseDesc != null) { - return unlockDatabase(db, unlockDatabaseDesc); - } - - SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc(); - if (switchDatabaseDesc != null) { - return switchDatabase(db, switchDatabaseDesc); - } - - DescDatabaseDesc descDatabaseDesc = work.getDescDatabaseDesc(); - if (descDatabaseDesc != null) { - return descDatabase(db, descDatabaseDesc); - } - - AlterDatabaseDesc alterDatabaseDesc = work.getAlterDatabaseDesc(); - if (alterDatabaseDesc != null) { - return alterDatabase(db, alterDatabaseDesc); - } - CreateTableDesc crtTbl = work.getCreateTblDesc(); if (crtTbl != null) { return createTable(db, crtTbl); @@ -462,11 +418,6 @@ public int execute(DriverContext driverContext) { return describeFunction(db, descFunc); } - ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc(); - if (showDatabases != null) { - return showDatabases(db, showDatabases); - } - ShowTablesDesc showTbls = work.getShowTblsDesc(); if (showTbls != null) { return showTablesOrViews(db, showTbls); @@ -1174,69 +1125,6 @@ private void writeListToFileAfterSort(List entries, String resFile) thro writeToFile(sb.toString(), resFile); } - private int alterDatabase(Hive db, AlterDatabaseDesc alterDbDesc) throws HiveException { - - String dbName = alterDbDesc.getDatabaseName(); - Database database = db.getDatabase(dbName); - if (database == null) { - throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); - } - - Map params = database.getParameters(); - if ((null != alterDbDesc.getReplicationSpec()) - && !alterDbDesc.getReplicationSpec().allowEventReplacementInto(params)) { - LOG.debug("DDLTask: Alter Database {} is skipped as database is newer than update", dbName); - return 0; // no replacement, 
the existing database state is newer than our update. - } - - switch (alterDbDesc.getAlterType()) { - case ALTER_PROPERTY: - Map newParams = alterDbDesc.getDatabaseProperties(); - - // if both old and new params are not null, merge them - if (params != null && newParams != null) { - params.putAll(newParams); - database.setParameters(params); - } else { - // if one of them is null, replace the old params with the new one - database.setParameters(newParams); - } - break; - - case ALTER_OWNER: - database.setOwnerName(alterDbDesc.getOwnerPrincipal().getName()); - database.setOwnerType(alterDbDesc.getOwnerPrincipal().getType()); - break; - - case ALTER_LOCATION: - try { - String newLocation = alterDbDesc.getLocation(); - URI locationURI = new URI(newLocation); - if ( !locationURI.isAbsolute() - || StringUtils.isBlank(locationURI.getScheme())) { - throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation); - } - if (newLocation.equals(database.getLocationUri())) { - LOG.info("AlterDatabase skipped. No change in location."); - } - else { - database.setLocationUri(newLocation); - } - } - catch (URISyntaxException e) { - throw new HiveException(e); - } - break; - - default: - throw new AssertionError("Unsupported alter database type! : " + alterDbDesc.getAlterType()); - } - - db.alterDatabase(database.getName(), database); - return 0; - } - - /** * Alters a materialized view. * @@ -2506,39 +2394,6 @@ public static StringBuilder appendSerdeParams( return builder; } - - /** - * Write a list of the available databases to a file. - * - * @param showDatabasesDesc - * These are the databases we're interested in. - * @return Returns 0 when execution succeeds and above 0 if it fails. - * @throws HiveException - * Throws this exception if an unexpected error occurs. 
- */ - private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException { - // get the databases for the desired pattern - populate the output stream - List databases = null; - if (showDatabasesDesc.getPattern() != null) { - LOG.debug("pattern: {}", showDatabasesDesc.getPattern()); - databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); - } else { - databases = db.getAllDatabases(); - } - LOG.info("Found {} database(s) matching the SHOW DATABASES statement.", databases.size()); - - // write the results in the file - DataOutputStream outStream = getOutputStream(showDatabasesDesc.getResFile()); - try { - formatter.showDatabases(outStream, databases); - } catch (Exception e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show databases"); - } finally { - IOUtils.closeStream(outStream); - } - return 0; - } - /** * Write a list of the tables/views in the database to a file. * @@ -3094,36 +2949,6 @@ private int lockTable(Hive db, LockTableDesc lockTbl) throws HiveException { } /** - * Lock the database - * - * @param lockDb - * the database to be locked along with the mode - * @return Returns 0 when execution succeeds and above 0 if it fails. - * @throws HiveException - * Throws this exception if an unexpected error occurs. - */ - private int lockDatabase(Hive db, LockDatabaseDesc lockDb) throws HiveException { - Context ctx = driverContext.getCtx(); - HiveTxnManager txnManager = ctx.getHiveTxnManager(); - return txnManager.lockDatabase(db, lockDb); - } - - /** - * Unlock the database specified - * - * @param unlockDb - * the database to be unlocked - * @return Returns 0 when execution succeeds and above 0 if it fails. - * @throws HiveException - * Throws this exception if an unexpected error occurs. 
- */ - private int unlockDatabase(Hive db, UnlockDatabaseDesc unlockDb) throws HiveException { - Context ctx = driverContext.getCtx(); - HiveTxnManager txnManager = ctx.getHiveTxnManager(); - return txnManager.unlockDatabase(db, unlockDb); - } - - /** * Unlock the table/partition specified * @param db * @@ -3213,43 +3038,6 @@ private int describeFunction(Hive db, DescFunctionDesc descFunc) throws HiveExce return 0; } - private int descDatabase(Hive db, DescDatabaseDesc descDatabase) throws HiveException { - DataOutputStream outStream = getOutputStream(descDatabase.getResFile()); - try { - Database database = db.getDatabase(descDatabase.getDatabaseName()); - - if (database == null) { - throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, descDatabase.getDatabaseName()); - } - Map params = null; - if (descDatabase.isExt()) { - params = database.getParameters(); - } - - // If this is a q-test, let's order the params map (lexicographically) by - // key. This is to get consistent param ordering between Java7 and Java8. - if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_IN_TEST) && - params != null) { - params = new TreeMap(params); - } - - String location = database.getLocationUri(); - if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_IN_TEST)) { - location = "location/in/test"; - } - PrincipalType ownerType = database.getOwnerType(); - formatter.showDatabaseDescription(outStream, database.getName(), - database.getDescription(), location, - database.getOwnerName(), (null == ownerType) ? null : ownerType.name(), params); - - } catch (Exception e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR); - } finally { - IOUtils.closeStream(outStream); - } - return 0; - } - /** * Write the status of tables to a file. 
* @@ -4591,96 +4379,6 @@ public static void validateSerDe(String serdeName, HiveConf conf) throws HiveExc } /** - * Create a Database - * @param db - * @param crtDb - * @return Always returns 0 - * @throws HiveException - */ - private int createDatabase(Hive db, CreateDatabaseDesc crtDb) - throws HiveException { - Database database = new Database(); - database.setName(crtDb.getName()); - database.setDescription(crtDb.getComment()); - database.setLocationUri(crtDb.getLocationUri()); - database.setParameters(crtDb.getDatabaseProperties()); - database.setOwnerName(SessionState.getUserFromAuthenticator()); - database.setOwnerType(PrincipalType.USER); - try { - makeLocationQualified(database); - db.createDatabase(database, crtDb.getIfNotExists()); - } - catch (AlreadyExistsException ex) { - //it would be better if AlreadyExistsException had an errorCode field.... - throw new HiveException(ex, ErrorMsg.DATABASE_ALREADY_EXISTS, crtDb.getName()); - } - return 0; - } - - /** - * Drop a Database - * @param db - * @param dropDb - * @return Always returns 0 - * @throws HiveException - */ - private int dropDatabase(Hive db, DropDatabaseDesc dropDb) - throws HiveException { - try { - String dbName = dropDb.getDatabaseName(); - ReplicationSpec replicationSpec = dropDb.getReplicationSpec(); - if (replicationSpec.isInReplicationScope()) { - Database database = db.getDatabase(dbName); - if (database == null - || !replicationSpec.allowEventReplacementInto(database.getParameters())) { - return 0; - } - } - db.dropDatabase(dbName, true, dropDb.getIfExists(), dropDb.isCasdade()); - // Unregister the functions as well - if (dropDb.isCasdade()) { - FunctionRegistry.unregisterPermanentFunctions(dbName); - } - } catch (NoSuchObjectException ex) { - throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, dropDb.getDatabaseName()); - } - return 0; - } - - /** - * Switch to a different Database - * @param db - * @param switchDb - * @return Always returns 0 - * @throws HiveException - */ - 
private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb) - throws HiveException { - String dbName = switchDb.getDatabaseName(); - if (!db.databaseExists(dbName)) { - throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); - } - SessionState.get().setCurrentDatabase(dbName); - - // set database specific parameters - Database database = db.getDatabase(dbName); - assert(database != null); - Map dbParams = database.getParameters(); - if (dbParams != null) { - for (HiveConf.ConfVars var: HiveConf.dbVars) { - String newValue = dbParams.get(var.varname); - if (newValue != null) { - LOG.info("Changing {} from {} to {}", var.varname, conf.getVar(var), - newValue); - conf.setVar(var, newValue); - } - } - } - - return 0; - } - - /** * Create a new table. * * @param db @@ -5115,25 +4813,6 @@ public static void makeLocationQualified(String databaseName, Table table, HiveC } } - /** - * Make qualified location for a database . - * - * @param database - * Database. - */ - public static final String DATABASE_PATH_SUFFIX = ".db"; - private void makeLocationQualified(Database database) throws HiveException { - if (database.isSetLocationUri()) { - database.setLocationUri(Utilities.getQualifiedPath(conf, new Path(database.getLocationUri()))); - } - else { - // Location is not set we utilize METASTOREWAREHOUSE together with database name - database.setLocationUri( - Utilities.getQualifiedPath(conf, new Path(HiveConf.getVar(conf, HiveConf.ConfVars.METASTOREWAREHOUSE), - database.getName().toLowerCase() + DATABASE_PATH_SUFFIX))); - } - } - /** * Validate if the given table/partition is eligible for update * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExportTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExportTask.java index 078691c..ae9d040 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExportTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExportTask.java @@ -44,7 +44,7 @@ public String getName() { } @Override - protected int 
execute(DriverContext driverContext) { + public int execute(DriverContext driverContext) { try { // Also creates the root directory TableExport.Paths exportPaths = new TableExport.Paths( diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java index 179f291..4706945 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java @@ -62,7 +62,7 @@ public ReplCopyTask(){ } @Override - protected int execute(DriverContext driverContext) { + public int execute(DriverContext driverContext) { LOG.debug("ReplCopyTask.execute()"); FileSystem dstFs = null; Path toPath = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 3308797..9627c35 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -225,7 +225,7 @@ public int executeTask(HiveHistory hiveHistory) { * * @return status of executing the task */ - protected abstract int execute(DriverContext driverContext); + public abstract int execute(DriverContext driverContext); public boolean isRootTask() { return rootTask; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java index 8e6fdc0..259cf97 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java @@ -23,6 +23,8 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask; import org.apache.hadoop.hive.ql.exec.repl.ReplDumpTask; @@ -88,6 +90,7 @@ public TaskTuple(Class workClass, Class> 
taskClass) { taskvec.add(new TaskTuple(CopyWork.class, CopyTask.class)); taskvec.add(new TaskTuple(ReplCopyWork.class, ReplCopyTask.class)); taskvec.add(new TaskTuple(DDLWork.class, DDLTask.class)); + taskvec.add(new TaskTuple(DDLWork2.class, DDLTask2.class)); taskvec.add(new TaskTuple( MaterializedViewDesc.class, MaterializedViewTask.class)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLDesc.java new file mode 100644 index 0000000..98762c6 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLDesc.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl; + +/** + * Marker interface for all DDL operation descriptors. + */ +public interface DDLDesc { +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperation.java new file mode 100644 index 0000000..301a1a6 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperation.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl; + +import java.io.DataOutputStream; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Abstract ancestor class of all DDL Operation classes. 
+ */ +public abstract class DDLOperation { + protected static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.DDLTask"); + + protected final DDLOperationContext context; + + public DDLOperation(DDLOperationContext context) { + this.context = context; + } + + public abstract int execute() throws HiveException; + + protected DataOutputStream getOutputStream(Path outputFile) throws HiveException { + try { + FileSystem fs = outputFile.getFileSystem(context.getConf()); + return fs.create(outputFile); + } catch (Exception e) { + throw new HiveException(e); + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperationContext.java new file mode 100644 index 0000000..208a45e --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLOperationContext.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.ddl; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; + +/** + * Context for DDL operations. + */ +public class DDLOperationContext { + private final Hive db; + private final HiveConf conf; + private final DriverContext driverContext; + private final MetaDataFormatter formatter; + + public DDLOperationContext(HiveConf conf, DriverContext driverContext) throws HiveException { + this.db = Hive.get(conf); + this.conf = conf; + this.driverContext = driverContext; + this.formatter = MetaDataFormatUtils.getFormatter(conf); + } + + public Hive getDb() { + return db; + } + + public HiveConf getConf() { + return conf; + } + + public DriverContext getDriverContext() { + return driverContext; + } + + public MetaDataFormatter getFormatter() { + return formatter; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLTask2.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLTask2.java new file mode 100644 index 0000000..4a14c41 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/DDLTask2.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
package org.apache.hadoop.hive.ql.exec.ddl;

import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
import org.apache.hadoop.hive.ql.plan.api.StageType;

/**
 * DDLTask implementation. Dispatches the {@link DDLDesc} carried by its {@link DDLWork2} to the
 * {@link DDLOperation} subclass registered for that descriptor type, and executes it.
 **/
public final class DDLTask2 extends Task<DDLWork2> implements Serializable {
  private static final long serialVersionUID = 1L;

  /** Maps each DDL descriptor type to the operation class that executes it. */
  private static final Map<Class<? extends DDLDesc>, Class<? extends DDLOperation>> DESC_TO_OPERATION =
      new HashMap<>();

  /**
   * Registers the operation class that handles the given descriptor type. Called from static
   * initializers of the {@code DDLDesc} implementations.
   */
  public static void registerOperation(Class<? extends DDLDesc> descClass,
      Class<? extends DDLOperation> operationClass) {
    DESC_TO_OPERATION.put(descClass, operationClass);
  }

  @Override
  public boolean requireLock() {
    return this.work != null && this.work.getNeedLock();
  }

  @Override
  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
      CompilationOpContext opContext) {
    super.initialize(queryState, queryPlan, ctx, opContext);
  }

  /**
   * Looks up and runs the registered operation for this task's descriptor.
   *
   * @return 0 on success, 1 on failure (the failure cause is recorded via {@code setException})
   */
  @Override
  public int execute(DriverContext driverContext) {
    // Under EXPLAIN ANALYZE nothing must actually be executed.
    if (driverContext.getCtx().getExplainAnalyze() == AnalyzeState.RUNNING) {
      return 0;
    }

    try {
      DDLDesc ddlDesc = work.getDDLDesc();

      Class<? extends DDLOperation> ddlOperationClass = DESC_TO_OPERATION.get(ddlDesc.getClass());
      if (ddlOperationClass == null) {
        throw new IllegalArgumentException("Unknown DDL request: " + ddlDesc.getClass());
      }
      DDLOperationContext context = new DDLOperationContext(conf, driverContext);
      // Every DDLOperation subclass is expected to expose a (DDLOperationContext, <desc type>)
      // constructor; it is resolved reflectively from the concrete descriptor class.
      Constructor<? extends DDLOperation> constructor =
          ddlOperationClass.getConstructor(DDLOperationContext.class, ddlDesc.getClass());
      DDLOperation ddlOperation = constructor.newInstance(context, ddlDesc);
      return ddlOperation.execute();
    } catch (Throwable e) {
      failed(e);
      return 1;
    }
  }

  /** Unwraps bare RuntimeException layers to surface the root cause, then records it. */
  private void failed(Throwable e) {
    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
      e = e.getCause();
    }
    setException(e);
    LOG.error("Failed", e);
  }

  @Override
  public StageType getType() {
    return StageType.DDL;
  }

  @Override
  public String getName() {
    return "DDL";
  }

  /*
  uses the authorizer from SessionState will need some more work to get this to run in parallel,
  however this should not be a bottle neck so might not need to parallelize this.
  */
  @Override
  public boolean canExecuteInParallel() {
    return false;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl;

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

import java.io.Serializable;

import java.util.Set;

/**
 * A DDL operation. Serializable work unit pairing a {@link DDLDesc} with the read/write entities
 * that the pre/post execution hooks need.
 */
public final class DDLWork2 implements Serializable {
  private static final long serialVersionUID = 1L;

  private DDLDesc ddlDesc;
  boolean needLock = false;

  /** ReadEntitites that are passed to the hooks. */
  protected Set<ReadEntity> inputs;
  /** List of WriteEntities that are passed to the hooks. */
  protected Set<WriteEntity> outputs;

  /** No-arg constructor kept for serialization. */
  public DDLWork2() {
  }

  public DDLWork2(Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    this.inputs = inputs;
    this.outputs = outputs;
  }

  public DDLWork2(Set<ReadEntity> inputs, Set<WriteEntity> outputs, DDLDesc ddlDesc) {
    this(inputs, outputs);
    this.ddlDesc = ddlDesc;
  }

  public Set<ReadEntity> getInputs() {
    return inputs;
  }

  public Set<WriteEntity> getOutputs() {
    return outputs;
  }

  /** Whether executing this work requires taking a lock first. */
  public boolean getNeedLock() {
    return needLock;
  }

  public void setNeedLock(boolean needLock) {
    this.needLock = needLock;
  }

  public DDLDesc getDDLDesc() {
    return ddlDesc;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.Serializable;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
import org.apache.hadoop.hive.ql.plan.PrincipalDesc;

/**
 * DDL task description for ALTER DATABASE commands.
 */
@Explain(displayName = "Alter Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class AlterDatabaseDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  static {
    // Self-registration: ties this descriptor to its executing operation.
    DDLTask2.registerOperation(AlterDatabaseDesc.class, AlterDatabaseOperation.class);
  }

  /**
   * Supported type of alter db commands.
   * Only altering the database property and owner is currently supported
   */
  public enum AlterDbType {
    ALTER_PROPERTY, ALTER_OWNER, ALTER_LOCATION
  }

  private final AlterDbType alterType;
  private final String databaseName;
  private final Map<String, String> dbProperties;
  private final ReplicationSpec replicationSpec;
  private final PrincipalDesc ownerPrincipal;
  private final String location;

  /** Constructor for ALTER DATABASE ... SET DBPROPERTIES. */
  public AlterDatabaseDesc(String databaseName, Map<String, String> dbProperties,
      ReplicationSpec replicationSpec) {
    this.alterType = AlterDbType.ALTER_PROPERTY;
    this.databaseName = databaseName;
    this.dbProperties = dbProperties;
    this.replicationSpec = replicationSpec;
    this.ownerPrincipal = null;
    this.location = null;
  }

  /** Constructor for ALTER DATABASE ... SET OWNER. */
  public AlterDatabaseDesc(String databaseName, PrincipalDesc ownerPrincipal,
      ReplicationSpec replicationSpec) {
    this.alterType = AlterDbType.ALTER_OWNER;
    this.databaseName = databaseName;
    this.dbProperties = null;
    this.replicationSpec = replicationSpec;
    this.ownerPrincipal = ownerPrincipal;
    this.location = null;
  }

  /** Constructor for ALTER DATABASE ... SET LOCATION. */
  public AlterDatabaseDesc(String databaseName, String location) {
    this.alterType = AlterDbType.ALTER_LOCATION;
    this.databaseName = databaseName;
    this.dbProperties = null;
    this.replicationSpec = null;
    this.ownerPrincipal = null;
    this.location = location;
  }

  public AlterDbType getAlterType() {
    return alterType;
  }

  @Explain(displayName="name", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getDatabaseName() {
    return databaseName;
  }

  @Explain(displayName="properties")
  public Map<String, String> getDatabaseProperties() {
    return dbProperties;
  }

  /**
   * @return what kind of replication scope this alter is running under.
   * This can result in a "ALTER IF NEWER THAN" kind of semantic
   */
  public ReplicationSpec getReplicationSpec() {
    return this.replicationSpec;
  }

  @Explain(displayName="owner")
  public PrincipalDesc getOwnerPrincipal() {
    return ownerPrincipal;
  }

  @Explain(displayName="location")
  public String getLocation() {
    return location;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;

/**
 * Operation process of altering a database.
 */
public class AlterDatabaseOperation extends DDLOperation {
  private final AlterDatabaseDesc desc;

  public AlterDatabaseOperation(DDLOperationContext context, AlterDatabaseDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Applies the requested property/owner/location change to the database and persists it.
   *
   * @throws HiveException if the database does not exist or the change is invalid
   */
  @Override
  public int execute() throws HiveException {
    String dbName = desc.getDatabaseName();
    Database database = context.getDb().getDatabase(dbName);
    if (database == null) {
      throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
    }

    Map<String, String> params = database.getParameters();
    if ((null != desc.getReplicationSpec()) &&
        !desc.getReplicationSpec().allowEventReplacementInto(params)) {
      LOG.debug("DDLTask: Alter Database {} is skipped as database is newer than update", dbName);
      return 0; // no replacement, the existing database state is newer than our update.
    }

    switch (desc.getAlterType()) {
    case ALTER_PROPERTY:
      alterProperties(database, params);
      break;

    case ALTER_OWNER:
      alterOwner(database);
      break;

    case ALTER_LOCATION:
      alterLocation(database);
      break;

    default:
      throw new AssertionError("Unsupported alter database type! : " + desc.getAlterType());
    }

    context.getDb().alterDatabase(database.getName(), database);
    return 0;
  }

  /** Merges the new DBPROPERTIES into the existing ones (new keys win on conflict). */
  private void alterProperties(Database database, Map<String, String> params) {
    Map<String, String> newParams = desc.getDatabaseProperties();

    // if both old and new params are not null, merge them
    if (params != null && newParams != null) {
      params.putAll(newParams);
      database.setParameters(params);
    } else {
      // if one of them is null, replace the old params with the new one
      database.setParameters(newParams);
    }
  }

  /** Sets the new owner name and type from the descriptor's principal. */
  private void alterOwner(Database database) {
    database.setOwnerName(desc.getOwnerPrincipal().getName());
    database.setOwnerType(desc.getOwnerPrincipal().getType());
  }

  /**
   * Sets the new location URI; rejects non-absolute or scheme-less URIs. A no-op when the
   * location is unchanged.
   */
  private void alterLocation(Database database) throws HiveException {
    try {
      String newLocation = desc.getLocation();
      URI locationURI = new URI(newLocation);
      if (!locationURI.isAbsolute() || StringUtils.isBlank(locationURI.getScheme())) {
        throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation);
      }

      if (newLocation.equals(database.getLocationUri())) {
        LOG.info("AlterDatabase skipped. No change in location.");
      } else {
        database.setLocationUri(newLocation);
      }
    } catch (URISyntaxException e) {
      throw new HiveException(e);
    }
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.Serializable;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * DDL task description for CREATE DATABASE commands.
 */
@Explain(displayName = "Create Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class CreateDatabaseDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  static {
    // Self-registration: ties this descriptor to its executing operation.
    DDLTask2.registerOperation(CreateDatabaseDesc.class, CreateDatabaseOperation.class);
  }

  private final String databaseName;
  private final String comment;
  private final String locationUri;
  private final boolean ifNotExists;
  private final Map<String, String> dbProperties;

  /**
   * @param locationUri may be null; the operation then derives a warehouse-relative default
   * @param ifNotExists whether IF NOT EXISTS was specified (suppresses already-exists errors)
   */
  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, boolean ifNotExists,
      Map<String, String> dbProperties) {
    this.databaseName = databaseName;
    this.comment = comment;
    this.locationUri = locationUri;
    this.ifNotExists = ifNotExists;
    this.dbProperties = dbProperties;
  }

  @Explain(displayName="if not exists", displayOnlyOnTrue = true)
  public boolean getIfNotExists() {
    return ifNotExists;
  }

  public Map<String, String> getDatabaseProperties() {
    return dbProperties;
  }

  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getName() {
    return databaseName;
  }

  @Explain(displayName="comment")
  public String getComment() {
    return comment;
  }

  @Explain(displayName="locationUri")
  public String getLocationUri() {
    return locationUri;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
 * Operation process of creating a database.
 */
public class CreateDatabaseOperation extends DDLOperation {
  // Suffix appended to the database name when deriving the default warehouse location.
  private static final String DATABASE_PATH_SUFFIX = ".db";

  private final CreateDatabaseDesc desc;

  public CreateDatabaseOperation(DDLOperationContext context, CreateDatabaseDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Builds the metastore Database object from the descriptor (owner is the current
   * authenticated user) and creates it.
   *
   * @throws HiveException if the database already exists and IF NOT EXISTS was not given
   */
  @Override
  public int execute() throws HiveException {
    Database database = new Database();
    database.setName(desc.getName());
    database.setDescription(desc.getComment());
    database.setLocationUri(desc.getLocationUri());
    database.setParameters(desc.getDatabaseProperties());
    database.setOwnerName(SessionState.getUserFromAuthenticator());
    database.setOwnerType(PrincipalType.USER);

    try {
      makeLocationQualified(database);
      // getIfNotExists() == true makes createDatabase swallow the already-exists case.
      context.getDb().createDatabase(database, desc.getIfNotExists());
    } catch (AlreadyExistsException ex) {
      //it would be better if AlreadyExistsException had an errorCode field....
      throw new HiveException(ex, ErrorMsg.DATABASE_ALREADY_EXISTS, desc.getName());
    }

    return 0;
  }

  /**
   * Ensures the database location is a fully qualified path; when none was given, defaults to
   * {@code <warehouse>/<dbname>.db}.
   */
  private void makeLocationQualified(Database database) throws HiveException {
    if (database.isSetLocationUri()) {
      database.setLocationUri(Utilities.getQualifiedPath(context.getConf(), new Path(database.getLocationUri())));
    } else {
      // Location is not set we utilize METASTOREWAREHOUSE together with database name
      Path path = new Path(HiveConf.getVar(context.getConf(), HiveConf.ConfVars.METASTOREWAREHOUSE),
          database.getName().toLowerCase() + DATABASE_PATH_SUFFIX);
      String qualifiedPath = Utilities.getQualifiedPath(context.getConf(), path);
      database.setLocationUri(qualifiedPath);
    }
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.Serializable;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * DDL task description for DESC DATABASE commands.
 */
@Explain(displayName = "Describe Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class DescDatabaseDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  // Column names and types of the result set, in FetchTask schema syntax (names#types).
  public static final String DESC_DATABASE_SCHEMA =
      "db_name,comment,location,owner_name,owner_type,parameters#string:string:string:string:string:string";

  static {
    // Self-registration: ties this descriptor to its executing operation.
    DDLTask2.registerOperation(DescDatabaseDesc.class, DescDatabaseOperation.class);
  }

  private final String resFile;
  private final String dbName;
  private final boolean isExt;

  /**
   * @param resFile file the describe output is written to
   * @param isExt whether DESCRIBE DATABASE EXTENDED was requested (include parameters)
   */
  public DescDatabaseDesc(Path resFile, String dbName, boolean isExt) {
    this.isExt = isExt;
    this.resFile = resFile.toString();
    this.dbName = dbName;
  }

  public boolean isExt() {
    return isExt;
  }

  @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getDatabaseName() {
    return dbName;
  }

  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
  public String getResFile() {
    return resFile;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.DataOutputStream;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;

/**
 * Operation process of describing a database.
 */
public class DescDatabaseOperation extends DDLOperation {
  private final DescDatabaseDesc desc;

  public DescDatabaseOperation(DDLOperationContext context, DescDatabaseDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Writes the database description to the descriptor's result file via the configured formatter.
   *
   * @throws HiveException if the database does not exist or the output cannot be written
   */
  @Override
  public int execute() throws HiveException {
    try (DataOutputStream outStream = getOutputStream(new Path(desc.getResFile()))) {
      Database database = context.getDb().getDatabase(desc.getDatabaseName());
      if (database == null) {
        throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());
      }

      // Parameters are only shown for DESCRIBE DATABASE EXTENDED.
      Map<String, String> params = null;
      if (desc.isExt()) {
        params = database.getParameters();
      }

      // If this is a q-test, let's order the params map (lexicographically) by
      // key. This is to get consistent param ordering between Java7 and Java8.
      if (HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_IN_TEST) && params != null) {
        params = new TreeMap<>(params);
      }

      String location = database.getLocationUri();
      if (HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_IN_TEST)) {
        // Mask the location in tests so q-file output is machine independent.
        location = "location/in/test";
      }

      PrincipalType ownerType = database.getOwnerType();
      context.getFormatter().showDatabaseDescription(outStream, database.getName(), database.getDescription(),
          location, database.getOwnerName(), (null == ownerType) ? null : ownerType.name(), params);
    } catch (Exception e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
    }

    return 0;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.Serializable;

import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * DDL task description for DROP DATABASE commands.
 */
@Explain(displayName = "Drop Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class DropDatabaseDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  static {
    // Self-registration: ties this descriptor to its executing operation.
    DDLTask2.registerOperation(DropDatabaseDesc.class, DropDatabaseOperation.class);
  }

  private final String databaseName;
  private final boolean ifExists;
  private final boolean cascade;
  private final ReplicationSpec replicationSpec;

  public DropDatabaseDesc(String databaseName, boolean ifExists, ReplicationSpec replicationSpec) {
    this(databaseName, ifExists, false, replicationSpec);
  }

  public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) {
    this.databaseName = databaseName;
    this.ifExists = ifExists;
    this.cascade = cascade;
    this.replicationSpec = replicationSpec;
  }

  @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getDatabaseName() {
    return databaseName;
  }

  @Explain(displayName = "if exists")
  public boolean getIfExists() {
    return ifExists;
  }

  /** Whether DROP DATABASE ... CASCADE was specified. */
  public boolean isCascade() {
    return cascade;
  }

  /**
   * @deprecated misspelled alias kept for existing callers; use {@link #isCascade()}.
   */
  @Deprecated
  public boolean isCasdade() {
    return cascade;
  }

  public ReplicationSpec getReplicationSpec() {
    return replicationSpec;
  }
}
package org.apache.hadoop.hive.ql.exec.ddl.database;

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;

/**
 * Operation process of dropping a database.
 */
public class DropDatabaseOperation extends DDLOperation {
  private final DropDatabaseDesc desc;

  public DropDatabaseOperation(DDLOperationContext context, DropDatabaseDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Drops the database (optionally CASCADE) and unregisters its permanent functions.
   * Skips the drop when running in replication scope and the existing database is newer
   * than the replication event.
   *
   * @throws HiveException if the database does not exist (wrapping NoSuchObjectException)
   */
  @Override
  public int execute() throws HiveException {
    try {
      String dbName = desc.getDatabaseName();
      ReplicationSpec replicationSpec = desc.getReplicationSpec();
      if (replicationSpec.isInReplicationScope()) {
        Database database = context.getDb().getDatabase(dbName);
        if (database == null || !replicationSpec.allowEventReplacementInto(database.getParameters())) {
          return 0;
        }
      }

      context.getDb().dropDatabase(dbName, true, desc.getIfExists(), desc.isCasdade());
      // Unregister the functions as well
      if (desc.isCasdade()) {
        FunctionRegistry.unregisterPermanentFunctions(dbName);
      }
    } catch (NoSuchObjectException ex) {
      throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());
    }

    return 0;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for LOCK DATABASE commands. + */ +@Explain(displayName = "Lock Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class LockDatabaseDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(LockDatabaseDesc.class, LockDatabaseOperation.class); + } + + private final String databaseName; + private final String mode; + private final String queryId; + private final String queryStr; + + public LockDatabaseDesc(String databaseName, String mode, String queryId, String queryStr) { + this.databaseName = databaseName; + this.mode = mode; + this.queryId = queryId; + this.queryStr = queryStr; + } + + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getDatabaseName() { + return databaseName; + } + + public String getMode() { + return mode; + } + + public String getQueryId() { + return queryId; + } + + public String getQueryStr() { + return queryStr; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/LockDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/LockDatabaseOperation.java new file mode 100644 index 
0000000..6902beb --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/LockDatabaseOperation.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of locking a database. 
+ */ +public class LockDatabaseOperation extends DDLOperation { + private final LockDatabaseDesc desc; + + public LockDatabaseOperation(DDLOperationContext context, LockDatabaseDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Context ctx = context.getDriverContext().getCtx(); + HiveTxnManager txnManager = ctx.getHiveTxnManager(); + return txnManager.lockDatabase(context.getDb(), desc); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesDesc.java new file mode 100644 index 0000000..da33ec9 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesDesc.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import java.io.Serializable; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for SHOW DATABASES commands. + */ +@Explain(displayName = "Show Databases", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class ShowDatabasesDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + /** Thrift ddl for the result of show databases. */ + public static final String SHOW_DATABASES_SCHEMA = "database_name#string"; + + static { + DDLTask2.registerOperation(ShowDatabasesDesc.class, ShowDatabasesOperation.class); + } + + private final String resFile; + private final String pattern; + + public ShowDatabasesDesc(Path resFile) { + this.resFile = resFile.toString(); + this.pattern = null; + } + + public ShowDatabasesDesc(Path resFile, String pattern) { + this.resFile = resFile.toString(); + this.pattern = pattern; + } + + @Explain(displayName = "pattern") + public String getPattern() { + return pattern; + } + + @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) + public String getResFile() { + return resFile; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesOperation.java new file mode 100644 index 0000000..a44e4c4 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/ShowDatabasesOperation.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.io.DataOutputStream;
import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.io.IOUtils;

/**
 * Operation process of showing the databases, optionally filtered by a name pattern.
 */
public class ShowDatabasesOperation extends DDLOperation {
  /** Description of the SHOW DATABASES command to execute. */
  private final ShowDatabasesDesc desc;

  public ShowDatabasesOperation(DDLOperationContext context, ShowDatabasesDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Lists the matching databases and writes them, via the configured formatter, into the
   * result file named by the description.
   *
   * @return 0 on success
   * @throws HiveException if listing the databases or writing the result file fails
   */
  @Override
  public int execute() throws HiveException {
    // Get the databases for the desired pattern - populate the output stream.
    List<String> databases;
    if (desc.getPattern() != null) {
      LOG.debug("pattern: {}", desc.getPattern());
      databases = context.getDb().getDatabasesByPattern(desc.getPattern());
    } else {
      databases = context.getDb().getAllDatabases();
    }

    LOG.info("Found {} database(s) matching the SHOW DATABASES statement.", databases.size());

    // Write the results in the file.
    DataOutputStream outStream = getOutputStream(new Path(desc.getResFile()));
    try {
      context.getFormatter().showDatabases(outStream, databases);
    } catch (Exception e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show databases");
    } finally {
      IOUtils.closeStream(outStream);
    }

    return 0;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for USE commands. + */ +@Explain(displayName = "Switch Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class SwitchDatabaseDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(SwitchDatabaseDesc.class, SwitchDatabaseOperation.class); + } + + private final String databaseName; + + public SwitchDatabaseDesc(String databaseName) { + this.databaseName = databaseName; + } + + @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getDatabaseName() { + return databaseName; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/SwitchDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/SwitchDatabaseOperation.java new file mode 100644 index 0000000..74d7c36 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/SwitchDatabaseOperation.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.ddl.database;

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
 * Operation process of switching to another database.
 */
public class SwitchDatabaseOperation extends DDLOperation {
  /** Description of the USE command to execute. */
  private final SwitchDatabaseDesc desc;

  public SwitchDatabaseOperation(DDLOperationContext context, SwitchDatabaseDesc desc) {
    super(context);
    this.desc = desc;
  }

  /**
   * Makes the described database the session's current database and applies any
   * database-specific configuration parameters it carries.
   *
   * @return 0 on success
   * @throws HiveException if the database does not exist
   */
  @Override
  public int execute() throws HiveException {
    String dbName = desc.getDatabaseName();
    if (!context.getDb().databaseExists(dbName)) {
      throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
    }

    SessionState.get().setCurrentDatabase(dbName);

    // Set database specific parameters.
    Database database = context.getDb().getDatabase(dbName);
    // Cannot be null here: existence was checked just above.
    assert(database != null);

    Map<String, String> dbParams = database.getParameters();
    if (dbParams != null) {
      // Overlay any db-scoped config vars onto the session configuration.
      for (HiveConf.ConfVars var : HiveConf.dbVars) {
        String newValue = dbParams.get(var.varname);
        if (newValue != null) {
          LOG.info("Changing {} from {} to {}", var.varname, context.getConf().getVar(var), newValue);
          context.getConf().setVar(var, newValue);
        }
      }
    }

    return 0;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import java.io.Serializable; + +import org.apache.hadoop.hive.ql.exec.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.exec.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.Explain.Level; + +/** + * DDL task description for UNLOCK DATABASE commands. + */ +@Explain(displayName = "Unlock Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) +public class UnlockDatabaseDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + static { + DDLTask2.registerOperation(UnlockDatabaseDesc.class, UnlockDatabaseOperation.class); + } + + private final String databaseName; + + public UnlockDatabaseDesc(String databaseName) { + this.databaseName = databaseName; + } + + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getDatabaseName() { + return databaseName; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/UnlockDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/UnlockDatabaseOperation.java new file mode 100644 index 0000000..24ab6d6 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/UnlockDatabaseOperation.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.ddl.database; + +import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.exec.ddl.DDLOperation; +import org.apache.hadoop.hive.ql.exec.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * Operation process of locking a database. + */ +public class UnlockDatabaseOperation extends DDLOperation { + private final UnlockDatabaseDesc desc; + + public UnlockDatabaseOperation(DDLOperationContext context, UnlockDatabaseDesc desc) { + super(context); + this.desc = desc; + } + + @Override + public int execute() throws HiveException { + Context ctx = context.getDriverContext().getCtx(); + HiveTxnManager txnManager = ctx.getHiveTxnManager(); + return txnManager.unlockDatabase(context.getDb(), desc); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/package-info.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/package-info.java new file mode 100644 index 0000000..a1a9e23 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/database/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database related DDL operation descriptions and operations. */ +package org.apache.hadoop.hive.ql.exec.ddl.database; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/package-info.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/package-info.java new file mode 100644 index 0000000..fb2da54 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ddl/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** DDL operations. 
*/ +package org.apache.hadoop.hive.ql.exec.ddl; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java index efecdb8..3bb7c8b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java @@ -69,7 +69,7 @@ private static final int MAX_COPY_RETRY = 5; @Override - protected int execute(DriverContext driverContext) { + public int execute(DriverContext driverContext) { String distCpDoAsUser = conf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER); Path sourcePath = work.fullyQualifiedSourcePath; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java index acfa354..d6fd6dd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java @@ -108,7 +108,7 @@ public String getName() { } @Override - protected int execute(DriverContext driverContext) { + public int execute(DriverContext driverContext) { try { Hive hiveDb = getHive(); Path dumpRoot = new Path(conf.getVar(HiveConf.ConfVars.REPLDIR), getNextDumpDir()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java index 4dc14f4..457e915 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java @@ -74,12 +74,12 @@ public StageType getType() { * by the driver. It does not track details across multiple runs of LoadTask. 
*/ private static class Scope { - boolean database = false, table = false, partition = false; + boolean database = false, table = false; List> rootTasks = new ArrayList<>(); } @Override - protected int execute(DriverContext driverContext) { + public int execute(DriverContext driverContext) { Task rootTask = work.getRootTask(); if (rootTask != null) { rootTask.setChildTasks(null); @@ -332,7 +332,6 @@ private void partitionsPostProcessing(BootstrapEventsIterator iterator, setUpDependencies(tableTracker, partitionsTracker); if (!scope.database && !scope.table) { scope.rootTasks.addAll(partitionsTracker.tasks()); - scope.partition = true; } loadTaskTracker.update(tableTracker); loadTaskTracker.update(partitionsTracker); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index 0fd305a..237ecbe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -22,15 +22,15 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.AlterDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.DatabaseEvent; import org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.util.Context; import org.apache.hadoop.hive.ql.exec.repl.util.TaskTracker; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; -import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; -import 
org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils.ReplLoadOpType; @@ -120,17 +120,13 @@ private boolean isDbEmpty(String dbName) throws HiveException { } private Task createDbTask(Database dbObj) { - CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(); - createDbDesc.setName(dbObj.getName()); - createDbDesc.setComment(dbObj.getDescription()); - createDbDesc.setDatabaseProperties(updateDbProps(dbObj, context.dumpDirectory)); - // note that we do not set location - for repl load, we want that auto-created. - createDbDesc.setIfNotExists(false); + CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(), null, false, + updateDbProps(dbObj, context.dumpDirectory)); // If it exists, we want this to be an error condition. Repl Load is not intended to replace a // db. // TODO: we might revisit this in create-drop-recreate cases, needs some thinking on. 
- DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc); + DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), createDbDesc); return TaskFactory.get(work, context.hiveConf); } @@ -139,10 +135,9 @@ private boolean isDbEmpty(String dbName) throws HiveException { } private Task setOwnerInfoTask(Database dbObj) { - AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), - new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), - null); - DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc); + AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), new PrincipalDesc(dbObj.getOwnerName(), + dbObj.getOwnerType()), null); + DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc); return TaskFactory.get(work, context.hiveConf); } @@ -163,9 +158,8 @@ private boolean isDbEmpty(String dbName) throws HiveException { private static Task alterDbTask(String dbName, Map props, HiveConf hiveConf) { - AlterDatabaseDesc alterDbDesc = - new AlterDatabaseDesc(dbName, props, null); - DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc); + AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, props, null); + DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc); return TaskFactory.get(work, hiveConf); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index 3e0c969..1c8d432 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import 
org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.exec.repl.ReplStateLogWork; import org.apache.hadoop.hive.ql.exec.repl.util.AddDependencyToLeaves; import org.apache.hadoop.hive.ql.exec.repl.util.TaskTracker; @@ -45,7 +47,6 @@ import org.apache.hadoop.hive.ql.parse.repl.load.UpdatedMetaDataTracker; import org.apache.hadoop.hive.ql.parse.repl.load.log.IncrementalLoadLogger; import org.apache.hadoop.hive.ql.parse.repl.load.message.MessageHandler; -import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork; @@ -295,7 +296,7 @@ private boolean shouldReplayEvent(FileStatus dir, DumpType dumpType, String dbNa mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState); AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, mapProp, new ReplicationSpec(replState, replState)); - Task updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf); + Task updateReplIdTask = TaskFactory.get(new DDLWork2(inputs, outputs, alterDbDesc), conf); // Link the update repl state task with dependency collection task if (preCursor != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java index 9ddd30c..eeefdca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java @@ -39,14 +39,14 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.exec.ddl.database.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.UnlockDatabaseDesc; 
import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.HiveOperation; -import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.LockTableDesc; -import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.ShutdownHookManager; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java index 12c1027..249c4b7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java @@ -19,18 +19,17 @@ import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; -import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.metastore.api.CommitTxnRequest; import org.apache.hadoop.hive.metastore.api.LockResponse; import org.apache.hadoop.hive.metastore.api.TxnToWriteId; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.exec.ddl.database.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.LockTableDesc; -import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import java.util.List; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java index 623b037..0f623ef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java @@ -29,6 +29,8 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.exec.ddl.database.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; @@ -36,9 +38,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.LockTableDesc; -import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index adfa431..eaf3529 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -76,6 +76,15 @@ import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.AlterDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.DescDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.DropDatabaseDesc; +import 
org.apache.hadoop.hive.ql.exec.ddl.database.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.ShowDatabasesDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.SwitchDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; @@ -101,7 +110,6 @@ import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc; -import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc; import org.apache.hadoop.hive.ql.plan.AlterMaterializedViewDesc.AlterMaterializedViewTypes; import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc; @@ -114,7 +122,6 @@ import org.apache.hadoop.hive.ql.plan.BasicStatsWork; import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc; import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork; -import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMMappingDesc; import org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc; import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc; @@ -123,10 +130,8 @@ import org.apache.hadoop.hive.ql.plan.DDLDesc; import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; import org.apache.hadoop.hive.ql.plan.DescTableDesc; -import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc; import org.apache.hadoop.hive.ql.plan.DropTableDesc; import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc; @@ -140,7 +145,6 @@ import 
org.apache.hadoop.hive.ql.plan.KillQueryDesc; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; import org.apache.hadoop.hive.ql.plan.LoadTableDesc; -import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.LockTableDesc; import org.apache.hadoop.hive.ql.plan.MoveWork; import org.apache.hadoop.hive.ql.plan.MsckDesc; @@ -153,7 +157,6 @@ import org.apache.hadoop.hive.ql.plan.ShowConfDesc; import org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc; -import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; @@ -164,10 +167,8 @@ import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc; import org.apache.hadoop.hive.ql.plan.ShowTxnsDesc; import org.apache.hadoop.hive.ql.plan.StatsWork; -import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.TruncateTableDesc; -import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; @@ -787,7 +788,7 @@ private void analyzeAlterDatabaseProperties(ASTNode ast) throws SemanticExceptio private void addAlterDbDesc(AlterDatabaseDesc alterDesc) throws SemanticException { Database database = getDatabase(alterDesc.getDatabaseName()); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterDesc))); } private void analyzeAlterDatabaseOwner(ASTNode ast) throws SemanticException { @@ -1361,16 +1362,11 @@ private void analyzeCreateDatabase(ASTNode ast) 
throws SemanticException { } } - CreateDatabaseDesc createDatabaseDesc = - new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists); - if (dbProps != null) { - createDatabaseDesc.setDatabaseProperties(dbProps); - } + CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists, dbProps); Database database = new Database(dbName, dbComment, dbLocation, dbProps); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - createDatabaseDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createDatabaseDesc))); } private void analyzeDropDatabase(ASTNode ast) throws SemanticException { @@ -1414,9 +1410,8 @@ private void analyzeDropDatabase(ASTNode ast) throws SemanticException { inputs.add(new ReadEntity(database)); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE)); - DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, - new ReplicationSpec()); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc))); + DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec()); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), dropDatabaseDesc))); } private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException { @@ -1426,8 +1421,7 @@ private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException { dbReadEntity.noLockNeeded(); inputs.add(dbReadEntity); SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), - switchDatabaseDesc))); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), switchDatabaseDesc))); } @@ -2571,11 +2565,10 @@ private void analyzeDescDatabase(ASTNode ast) throws SemanticException { throw new 
SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); } - DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), - dbName, isExtended); + DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended); inputs.add(new ReadEntity(getDatabase(dbName))); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc))); - setFetchTask(createFetchTask(descDbDesc.getSchema())); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descDbDesc))); + setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA)); } public static HashMap getPartSpec(ASTNode partspec) @@ -2658,8 +2651,8 @@ private void analyzeShowDatabases(ASTNode ast) throws SemanticException { } else { showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile()); } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showDatabasesDesc))); - setFetchTask(createFetchTask(showDatabasesDesc.getSchema())); + rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showDatabasesDesc))); + setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA)); } private void analyzeShowTables(ASTNode ast) throws SemanticException { @@ -3132,10 +3125,9 @@ private void analyzeLockDatabase(ASTNode ast) throws SemanticException { // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. 
outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); - LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, - HiveConf.getVar(conf, ConfVars.HIVEQUERYID)); - lockDatabaseDesc.setQueryStr(ctx.getCmd()); - DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); + LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), + ctx.getCmd()); + DDLWork2 work = new DDLWork2(getInputs(), getOutputs(), lockDatabaseDesc); rootTasks.add(TaskFactory.get(work)); ctx.setNeedLockMgr(true); } @@ -3151,7 +3143,7 @@ private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException { outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName); - DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc); + DDLWork2 work = new DDLWork2(getInputs(), getOutputs(), unlockDatabaseDesc); rootTasks.add(TaskFactory.get(work)); // Need to initialize the lock manager ctx.setNeedLockMgr(true); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index e68e055..eb6cbea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -22,11 +22,11 @@ import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; -import 
org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.parse.repl.dump.Utils; import org.apache.hadoop.hive.ql.plan.PrincipalDesc; @@ -75,16 +75,14 @@ } newDbProps.put(key, entry.getValue()); } - alterDbDesc = new AlterDatabaseDesc(actualDbName, - newDbProps, context.eventOnlyReplicationSpec()); + alterDbDesc = new AlterDatabaseDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec()); } else { - alterDbDesc = new AlterDatabaseDesc(actualDbName, - new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), + alterDbDesc = new AlterDatabaseDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec()); } - Task alterDbTask = TaskFactory.get( - new DDLWork(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf); + Task alterDbTask = TaskFactory.get( + new DDLWork2(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf); context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index f8d8d1a..1205ee3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -25,18 +25,17 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.AlterDatabaseDesc; +import org.apache.hadoop.hive.ql.exec.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.parse.EximUtil; import org.apache.hadoop.hive.ql.parse.SemanticException; import 
org.apache.hadoop.hive.ql.parse.repl.load.MetaData; -import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; -import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.PrincipalDesc; import java.io.IOException; import java.io.Serializable; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -58,23 +57,22 @@ context.dbName == null ? db.getName() : context.dbName; CreateDatabaseDesc createDatabaseDesc = - new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, true); - createDatabaseDesc.setDatabaseProperties(db.getParameters()); - Task createDBTask = TaskFactory.get( - new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc), context.hiveConf); + new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, true, db.getParameters()); + Task createDBTask = TaskFactory.get( + new DDLWork2(new HashSet<>(), new HashSet<>(), createDatabaseDesc), context.hiveConf); if (!db.getParameters().isEmpty()) { AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec()); - Task alterDbProperties = TaskFactory - .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), context.hiveConf); + Task alterDbProperties = TaskFactory + .get(new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc), context.hiveConf); createDBTask.addDependentTask(alterDbProperties); } if (StringUtils.isNotEmpty(db.getOwnerName())) { AlterDatabaseDesc alterDbOwner = new AlterDatabaseDesc(destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec()); - Task alterDbTask = TaskFactory - .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner), context.hiveConf); + Task alterDbTask = TaskFactory + .get(new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbOwner), context.hiveConf); 
createDBTask.addDependentTask(alterDbTask); } updatedMetadata diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java index 8b11a9e..e0fd11c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java @@ -20,9 +20,9 @@ import org.apache.hadoop.hive.metastore.messaging.DropDatabaseMessage; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.exec.ddl.database.DropDatabaseDesc; import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import java.io.Serializable; import java.util.Collections; @@ -36,11 +36,9 @@ DropDatabaseMessage msg = deserializer.getDropDatabaseMessage(context.dmd.getPayload()); String actualDbName = context.isDbNameEmpty() ? 
msg.getDB() : context.dbName; - DropDatabaseDesc desc = - new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec()); + DropDatabaseDesc desc = new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec()); Task dropDBTask = - TaskFactory - .get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf); + TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), desc), context.hiveConf); context.log.info( "Added drop database task : {}:{}", dropDBTask.getId(), desc.getDatabaseName()); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java deleted file mode 100644 index 347ed97..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import java.util.Map; - -import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - -/** - * AlterDatabaseDesc. 
- * - */ -@Explain(displayName = "Alter Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class AlterDatabaseDesc extends DDLDesc implements Serializable { - - private static final long serialVersionUID = 1L; - - // Only altering the database property and owner is currently supported - public static enum ALTER_DB_TYPES { - ALTER_PROPERTY, ALTER_OWNER, ALTER_LOCATION - }; - - ALTER_DB_TYPES alterType; - String databaseName; - Map dbProperties; - PrincipalDesc ownerPrincipal; - ReplicationSpec replicationSpec; - String location; - - /** - * For serialization only. - */ - public AlterDatabaseDesc() { - } - - public AlterDatabaseDesc(String databaseName, Map dbProps, - ReplicationSpec replicationSpec) { - super(); - this.databaseName = databaseName; - this.replicationSpec = replicationSpec; - this.setDatabaseProperties(dbProps); - this.setAlterType(ALTER_DB_TYPES.ALTER_PROPERTY); - } - - public AlterDatabaseDesc(String databaseName, PrincipalDesc ownerPrincipal, - ReplicationSpec replicationSpec) { - this.databaseName = databaseName; - this.replicationSpec = replicationSpec; - this.setOwnerPrincipal(ownerPrincipal); - this.setAlterType(ALTER_DB_TYPES.ALTER_OWNER); - } - - public AlterDatabaseDesc(String databaseName, String newLocation) { - this.databaseName = databaseName; - this.setLocation(newLocation); - this.setAlterType(ALTER_DB_TYPES.ALTER_LOCATION); - } - - @Explain(displayName="properties") - public Map getDatabaseProperties() { - return dbProperties; - } - - public void setDatabaseProperties(Map dbProps) { - this.dbProperties = dbProps; - } - - @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return databaseName; - } - - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; - } - - @Explain(displayName="owner") - public PrincipalDesc getOwnerPrincipal() { - return ownerPrincipal; - } - - public void 
setOwnerPrincipal(PrincipalDesc ownerPrincipal) { - this.ownerPrincipal = ownerPrincipal; - } - - @Explain(displayName="location") - public String getLocation() { - return location; - } - - public void setLocation(String location) { - this.location = location; - } - public ALTER_DB_TYPES getAlterType() { - return alterType; - } - - public void setAlterType(ALTER_DB_TYPES alterType) { - this.alterType = alterType; - } - - /** - * @return what kind of replication scope this alter is running under. - * This can result in a "ALTER IF NEWER THAN" kind of semantic - */ - public ReplicationSpec getReplicationSpec() { - return this.replicationSpec; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java deleted file mode 100644 index f2e6a77..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import java.util.Map; - -import org.apache.hadoop.hive.ql.plan.Explain.Level; - -/** - * CreateDatabaseDesc. 
- * - */ -@Explain(displayName = "Create Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class CreateDatabaseDesc extends DDLDesc implements Serializable { - - private static final long serialVersionUID = 1L; - - String databaseName; - String locationUri; - String comment; - boolean ifNotExists; - Map dbProperties; - - /** - * For serialization only. - */ - public CreateDatabaseDesc() { - } - - public CreateDatabaseDesc(String databaseName, String comment, - String locationUri, boolean ifNotExists) { - super(); - this.databaseName = databaseName; - this.comment = comment; - this.locationUri = locationUri; - this.ifNotExists = ifNotExists; - this.dbProperties = null; - } - - @Explain(displayName="if not exists", displayOnlyOnTrue = true) - public boolean getIfNotExists() { - return ifNotExists; - } - - public void setIfNotExists(boolean ifNotExists) { - this.ifNotExists = ifNotExists; - } - - public Map getDatabaseProperties() { - return dbProperties; - } - - public void setDatabaseProperties(Map dbProps) { - this.dbProperties = dbProps; - } - - @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getName() { - return databaseName; - } - - public void setName(String databaseName) { - this.databaseName = databaseName; - } - - @Explain(displayName="comment") - public String getComment() { - return comment; - } - - public void setComment(String comment) { - this.comment = comment; - } - - @Explain(displayName="locationUri") - public String getLocationUri() { - return locationUri; - } - - public void setLocationUri(String locationUri) { - this.locationUri = locationUri; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index 8ed3b03..d05ce18 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -37,17 +37,11 @@ private 
PreInsertTableDesc preInsertTableDesc; private InsertCommitHookDesc insertCommitHookDesc; private AlterMaterializedViewDesc alterMVDesc; - private CreateDatabaseDesc createDatabaseDesc; - private SwitchDatabaseDesc switchDatabaseDesc; - private DropDatabaseDesc dropDatabaseDesc; - private LockDatabaseDesc lockDatabaseDesc; - private UnlockDatabaseDesc unlockDatabaseDesc; private CreateTableDesc createTblDesc; private CreateTableLikeDesc createTblLikeDesc; private CreateViewDesc createVwDesc; private DropTableDesc dropTblDesc; private AlterTableDesc alterTblDesc; - private ShowDatabasesDesc showDatabasesDesc; private ShowTablesDesc showTblsDesc; private ShowColumnsDesc showColumnsDesc; private ShowTblPropertiesDesc showTblPropertiesDesc; @@ -68,8 +62,6 @@ private AlterTableSimpleDesc alterTblSimpleDesc; private MsckDesc msckDesc; private ShowTableStatusDesc showTblStatusDesc; - private DescDatabaseDesc descDbDesc; - private AlterDatabaseDesc alterDbDesc; private AlterTableAlterPartDesc alterTableAlterPartDesc; private TruncateTableDesc truncateTblDesc; private AlterTableExchangePartition alterTableExchangePartition; @@ -121,33 +113,6 @@ public DDLWork(HashSet inputs, HashSet outputs) { this.outputs = outputs; } - /** - * @param createDatabaseDesc - * Create Database descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - CreateDatabaseDesc createDatabaseDesc) { - this(inputs, outputs); - this.createDatabaseDesc = createDatabaseDesc; - } - - /** - * @param inputs - * @param outputs - * @param descDatabaseDesc Database descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - DescDatabaseDesc descDatabaseDesc) { - this(inputs, outputs); - this.descDbDesc = descDatabaseDesc; - } - - public DDLWork(HashSet inputs, HashSet outputs, - AlterDatabaseDesc alterDbDesc) { - this(inputs, outputs); - this.alterDbDesc = alterDbDesc; - } - public DDLWork(HashSet inputs, HashSet outputs, TruncateTableDesc truncateTblDesc) { this(inputs, outputs); @@ -160,30 
+125,6 @@ public DDLWork(HashSet inputs, HashSet outputs, this.showConfDesc = showConfDesc; } - public DescDatabaseDesc getDescDatabaseDesc() { - return descDbDesc; - } - - /** - * @param dropDatabaseDesc - * Drop Database descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - DropDatabaseDesc dropDatabaseDesc) { - this(inputs, outputs); - this.dropDatabaseDesc = dropDatabaseDesc; - } - - /** - * @param switchDatabaseDesc - * Switch Database descriptor - */ - public DDLWork(HashSet inputs, HashSet outputs, - SwitchDatabaseDesc switchDatabaseDesc) { - this(inputs, outputs); - this.switchDatabaseDesc = switchDatabaseDesc; - } - /** * @param alterTblDesc * alter table descriptor @@ -259,16 +200,6 @@ public DDLWork(HashSet inputs, HashSet outputs, } /** - * @param showDatabasesDesc - */ - public DDLWork(HashSet inputs, HashSet outputs, - ShowDatabasesDesc showDatabasesDesc) { - this(inputs, outputs); - - this.showDatabasesDesc = showDatabasesDesc; - } - - /** * @param showTblsDesc */ public DDLWork(HashSet inputs, HashSet outputs, @@ -309,24 +240,6 @@ public DDLWork(HashSet inputs, HashSet outputs, } /** - * @param lockDatabaseDesc - */ - public DDLWork(HashSet inputs, HashSet outputs, - LockDatabaseDesc lockDatabaseDesc) { - this(inputs, outputs); - this.lockDatabaseDesc = lockDatabaseDesc; - } - - /** - * @param unlockDatabaseDesc - */ - public DDLWork(HashSet inputs, HashSet outputs, - UnlockDatabaseDesc unlockDatabaseDesc) { - this(inputs, outputs); - this.unlockDatabaseDesc = unlockDatabaseDesc; - } - - /** * @param showFuncsDesc */ public DDLWork(HashSet inputs, HashSet outputs, @@ -613,67 +526,6 @@ public DDLWork(HashSet inputs, HashSet outputs, } /** - * @return Create Database descriptor - */ - public CreateDatabaseDesc getCreateDatabaseDesc() { - return createDatabaseDesc; - } - - /** - * Set Create Database descriptor - * @param createDatabaseDesc - */ - public void setCreateDatabaseDesc(CreateDatabaseDesc createDatabaseDesc) { - 
this.createDatabaseDesc = createDatabaseDesc; - } - - /** - * @return Drop Database descriptor - */ - public DropDatabaseDesc getDropDatabaseDesc() { - return dropDatabaseDesc; - } - - /** - * Set Drop Database descriptor - * @param dropDatabaseDesc - */ - public void setDropDatabaseDesc(DropDatabaseDesc dropDatabaseDesc) { - this.dropDatabaseDesc = dropDatabaseDesc; - } - - /** - * @return Switch Database descriptor - */ - public SwitchDatabaseDesc getSwitchDatabaseDesc() { - return switchDatabaseDesc; - } - - /** - * Set Switch Database descriptor - * @param switchDatabaseDesc - */ - public void setSwitchDatabaseDesc(SwitchDatabaseDesc switchDatabaseDesc) { - this.switchDatabaseDesc = switchDatabaseDesc; - } - - public LockDatabaseDesc getLockDatabaseDesc() { - return lockDatabaseDesc; - } - - public void setLockDatabaseDesc(LockDatabaseDesc lockDatabaseDesc) { - this.lockDatabaseDesc = lockDatabaseDesc; - } - - public UnlockDatabaseDesc getUnlockDatabaseDesc() { - return unlockDatabaseDesc; - } - - public void setUnlockDatabaseDesc(UnlockDatabaseDesc unlockDatabaseDesc) { - this.unlockDatabaseDesc = unlockDatabaseDesc; - } - - /** * @return the createTblDesc */ @Explain(displayName = "Create Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -682,14 +534,6 @@ public CreateTableDesc getCreateTblDesc() { } /** - * @param createTblDesc - * the createTblDesc to set - */ - public void setCreateTblDesc(CreateTableDesc createTblDesc) { - this.createTblDesc = createTblDesc; - } - - /** * @return the createTblDesc */ @Explain(displayName = "Create Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -698,14 +542,6 @@ public CreateTableLikeDesc getCreateTblLikeDesc() { } /** - * @param createTblLikeDesc - * the createTblDesc to set - */ - public void setCreateTblLikeDesc(CreateTableLikeDesc createTblLikeDesc) { - this.createTblLikeDesc = createTblLikeDesc; - } - - /** * @return the createTblDesc */ 
@Explain(displayName = "Create View Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -714,14 +550,6 @@ public CreateViewDesc getCreateViewDesc() { } /** - * @param createVwDesc - * the createViewDesc to set - */ - public void setCreateViewDesc(CreateViewDesc createVwDesc) { - this.createVwDesc = createVwDesc; - } - - /** * @return the dropTblDesc */ @Explain(displayName = "Drop Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -730,14 +558,6 @@ public DropTableDesc getDropTblDesc() { } /** - * @param dropTblDesc - * the dropTblDesc to set - */ - public void setDropTblDesc(DropTableDesc dropTblDesc) { - this.dropTblDesc = dropTblDesc; - } - - /** * @return the alterTblDesc */ @Explain(displayName = "Alter Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -745,13 +565,6 @@ public AlterTableDesc getAlterTblDesc() { return alterTblDesc; } - /** - * @param alterTblDesc - * the alterTblDesc to set - */ - public void setAlterTblDesc(AlterTableDesc alterTblDesc) { - this.alterTblDesc = alterTblDesc; - } /** * @return the alterMVDesc @@ -762,30 +575,6 @@ public AlterMaterializedViewDesc getAlterMaterializedViewDesc() { } /** - * @param alterMVDesc - * the alterMVDesc to set - */ - public void setAlterMVDesc(AlterMaterializedViewDesc alterMVDesc) { - this.alterMVDesc = alterMVDesc; - } - - /** - * @return the showDatabasesDesc - */ - @Explain(displayName = "Show Databases Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public ShowDatabasesDesc getShowDatabasesDesc() { - return showDatabasesDesc; - } - - /** - * @param showDatabasesDesc - * the showDatabasesDesc to set - */ - public void setShowDatabasesDesc(ShowDatabasesDesc showDatabasesDesc) { - this.showDatabasesDesc = showDatabasesDesc; - } - - /** * @return the showTblsDesc */ @Explain(displayName = "Show Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -794,14 
+583,6 @@ public ShowTablesDesc getShowTblsDesc() { } /** - * @param showTblsDesc - * the showTblsDesc to set - */ - public void setShowTblsDesc(ShowTablesDesc showTblsDesc) { - this.showTblsDesc = showTblsDesc; - } - - /** * @return the showColumnsDesc */ @Explain(displayName = "Show Columns Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -810,14 +591,6 @@ public ShowColumnsDesc getShowColumnsDesc() { } /** - * @param showColumnsDesc - * the showColumnsDesc to set - */ - public void setShowColumnsDesc(ShowColumnsDesc showColumnsDesc) { - this.showColumnsDesc = showColumnsDesc; - } - - /** * @return the showFuncsDesc */ @Explain(displayName = "Show Function Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -878,62 +651,6 @@ public KillQueryDesc getKillQueryDesc() { } /** - * @param showFuncsDesc - * the showFuncsDesc to set - */ - public void setShowFuncsDesc(ShowFunctionsDesc showFuncsDesc) { - this.showFuncsDesc = showFuncsDesc; - } - - /** - * @param showLocksDesc - * the showLocksDesc to set - */ - public void setShowLocksDesc(ShowLocksDesc showLocksDesc) { - this.showLocksDesc = showLocksDesc; - } - - public void setShowCompactionsDesc(ShowCompactionsDesc showCompactionsDesc) { - this.showCompactionsDesc = showCompactionsDesc; - } - - public void setShowTxnsDesc(ShowTxnsDesc showTxnsDesc) { - this.showTxnsDesc = showTxnsDesc; - } - - public void setAbortTxnsDesc(AbortTxnsDesc abortTxnsDesc) { - this.abortTxnsDesc = abortTxnsDesc; - } - - public void setKillQueryDesc(KillQueryDesc killQueryDesc) { - this.killQueryDesc = killQueryDesc; - } - - /** - * @param lockTblDesc - * the lockTblDesc to set - */ - public void setLockTblDesc(LockTableDesc lockTblDesc) { - this.lockTblDesc = lockTblDesc; - } - - /** - * @param unlockTblDesc - * the unlockTblDesc to set - */ - public void setUnlockTblDesc(UnlockTableDesc unlockTblDesc) { - this.unlockTblDesc = unlockTblDesc; - } - - /** - * @param descFuncDesc - * the 
showFuncsDesc to set - */ - public void setDescFuncDesc(DescFunctionDesc descFuncDesc) { - descFunctionDesc = descFuncDesc; - } - - /** * @return the showPartsDesc */ @Explain(displayName = "Show Partitions Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -941,24 +658,12 @@ public ShowPartitionsDesc getShowPartsDesc() { return showPartsDesc; } - /** - * @param showPartsDesc - * the showPartsDesc to set - */ - public void setShowPartsDesc(ShowPartitionsDesc showPartsDesc) { - this.showPartsDesc = showPartsDesc; - } - @Explain(displayName = "Show Create Database Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public ShowCreateDatabaseDesc getShowCreateDbDesc() { return showCreateDbDesc; } - public void setShowCreateDbDesc(ShowCreateDatabaseDesc showCreateDbDesc) { - this.showCreateDbDesc = showCreateDbDesc; - } - /** * @return the showCreateTblDesc */ @@ -968,14 +673,6 @@ public ShowCreateTableDesc getShowCreateTblDesc() { } /** - * @param showCreateTblDesc - * the showCreateTblDesc to set - */ - public void setShowCreateTblDesc(ShowCreateTableDesc showCreateTblDesc) { - this.showCreateTblDesc = showCreateTblDesc; - } - - /** * @return the descTblDesc */ @Explain(displayName = "Describe Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -984,14 +681,6 @@ public DescTableDesc getDescTblDesc() { } /** - * @param descTblDesc - * the descTblDesc to set - */ - public void setDescTblDesc(DescTableDesc descTblDesc) { - this.descTblDesc = descTblDesc; - } - - /** * @return information about the partitions we want to add. */ @Explain(displayName = "Add Partition Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) @@ -1000,14 +689,6 @@ public AddPartitionDesc getAddPartitionDesc() { } /** - * @param addPartitionDesc - * information about the partitions we want to add. 
- */ - public void setAddPartitionDesc(AddPartitionDesc addPartitionDesc) { - this.addPartitionDesc = addPartitionDesc; - } - - /** * @return information about the partitions we want to rename. */ public RenamePartitionDesc getRenamePartitionDesc() { @@ -1015,14 +696,6 @@ public RenamePartitionDesc getRenamePartitionDesc() { } /** - * @param renamePartitionDesc - * information about the partitions we want to rename. - */ - public void setRenamePartitionDesc(RenamePartitionDesc renamePartitionDesc) { - this.renamePartitionDesc = renamePartitionDesc; - } - - /** * @return information about the table/partitions we want to alter. */ public AlterTableSimpleDesc getAlterTblSimpleDesc() { @@ -1030,14 +703,6 @@ public AlterTableSimpleDesc getAlterTblSimpleDesc() { } /** - * @param desc - * information about the table/partitions we want to alter. - */ - public void setAlterTblSimpleDesc(AlterTableSimpleDesc desc) { - this.alterTblSimpleDesc = desc; - } - - /** * @return Metastore check description */ public MsckDesc getMsckDesc() { @@ -1045,40 +710,16 @@ public MsckDesc getMsckDesc() { } /** - * @param msckDesc - * metastore check description - */ - public void setMsckDesc(MsckDesc msckDesc) { - this.msckDesc = msckDesc; - } - - /** * @return show table descriptor */ public ShowTableStatusDesc getShowTblStatusDesc() { return showTblStatusDesc; } - /** - * @param showTblStatusDesc - * show table descriptor - */ - public void setShowTblStatusDesc(ShowTableStatusDesc showTblStatusDesc) { - this.showTblStatusDesc = showTblStatusDesc; - } - public ShowTblPropertiesDesc getShowTblPropertiesDesc() { return showTblPropertiesDesc; } - public void setShowTblPropertiesDesc(ShowTblPropertiesDesc showTblPropertiesDesc) { - this.showTblPropertiesDesc = showTblPropertiesDesc; - } - - public void setDescFunctionDesc(DescFunctionDesc descFunctionDesc) { - this.descFunctionDesc = descFunctionDesc; - } - public HashSet getInputs() { return inputs; } @@ -1087,14 +728,6 @@ public void 
setDescFunctionDesc(DescFunctionDesc descFunctionDesc) { return outputs; } - public void setInputs(HashSet inputs) { - this.inputs = inputs; - } - - public void setOutputs(HashSet outputs) { - this.outputs = outputs; - } - /** * @return role ddl desc */ @@ -1103,13 +736,6 @@ public RoleDDLDesc getRoleDDLDesc() { } /** - * @param roleDDLDesc role ddl desc - */ - public void setRoleDDLDesc(RoleDDLDesc roleDDLDesc) { - this.roleDDLDesc = roleDDLDesc; - } - - /** * @return grant desc */ public GrantDesc getGrantDesc() { @@ -1117,67 +743,27 @@ public GrantDesc getGrantDesc() { } /** - * @param grantDesc grant desc - */ - public void setGrantDesc(GrantDesc grantDesc) { - this.grantDesc = grantDesc; - } - - /** * @return show grant desc */ public ShowGrantDesc getShowGrantDesc() { return showGrantDesc; } - /** - * @param showGrantDesc - */ - public void setShowGrantDesc(ShowGrantDesc showGrantDesc) { - this.showGrantDesc = showGrantDesc; - } - public RevokeDesc getRevokeDesc() { return revokeDesc; } - public void setRevokeDesc(RevokeDesc revokeDesc) { - this.revokeDesc = revokeDesc; - } - public GrantRevokeRoleDDL getGrantRevokeRoleDDL() { return grantRevokeRoleDDL; } /** - * @param grantRevokeRoleDDL - */ - public void setGrantRevokeRoleDDL(GrantRevokeRoleDDL grantRevokeRoleDDL) { - this.grantRevokeRoleDDL = grantRevokeRoleDDL; - } - - public void setAlterDatabaseDesc(AlterDatabaseDesc alterDbDesc) { - this.alterDbDesc = alterDbDesc; - } - - public AlterDatabaseDesc getAlterDatabaseDesc() { - return this.alterDbDesc; - } - - /** * @return descriptor for merging files */ public AlterTablePartMergeFilesDesc getMergeFilesDesc() { return mergeFilesDesc; } - /** - * @param mergeDesc descriptor of merging files - */ - public void setMergeFilesDesc(AlterTablePartMergeFilesDesc mergeDesc) { - this.mergeFilesDesc = mergeDesc; - } - public boolean getNeedLock() { return needLock; } @@ -1193,23 +779,11 @@ public AlterTableAlterPartDesc getAlterTableAlterPartDesc() { return 
alterTableAlterPartDesc; } - /** - * @param alterPartitionDesc - * information about the partitions we want to change. - */ - public void setAlterTableAlterPartDesc(AlterTableAlterPartDesc alterPartitionDesc) { - this.alterTableAlterPartDesc = alterPartitionDesc; - } - @Explain(displayName = "Truncate Table Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public TruncateTableDesc getTruncateTblDesc() { return truncateTblDesc; } - public void setTruncateTblDesc(TruncateTableDesc truncateTblDesc) { - this.truncateTblDesc = truncateTblDesc; - } - /** * @return information about the table partition to be exchanged */ @@ -1224,136 +798,67 @@ public CacheMetadataDesc getCacheMetadataDesc() { return this.cacheMetadataDesc; } - /** - * @param alterTableExchangePartition - * set the value of the table partition to be exchanged - */ - public void setAlterTableExchangePartition( - AlterTableExchangePartition alterTableExchangePartition) { - this.alterTableExchangePartition = alterTableExchangePartition; - } - public ShowConfDesc getShowConfDesc() { return showConfDesc; } - public void setShowConfDesc(ShowConfDesc showConfDesc) { - this.showConfDesc = showConfDesc; - } - @Explain(displayName = "Insert operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public InsertCommitHookDesc getInsertCommitHookDesc() { return insertCommitHookDesc; } - public void setInsertCommitHookDesc(InsertCommitHookDesc insertCommitHookDesc) { - this.insertCommitHookDesc = insertCommitHookDesc; - } - @Explain(displayName = "Pre Insert operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public PreInsertTableDesc getPreInsertTableDesc() { return preInsertTableDesc; } - public void setPreInsertTableDesc(PreInsertTableDesc preInsertTableDesc) { - this.preInsertTableDesc = preInsertTableDesc; - } - @Explain(displayName = "Create resource plan") public CreateResourcePlanDesc getCreateResourcePlanDesc() { return createResourcePlanDesc; } - 
public void setCreateResourcePlanDesc(CreateResourcePlanDesc createResourcePlanDesc) { - this.createResourcePlanDesc = createResourcePlanDesc; - } - @Explain(displayName = "Show resource plan") public ShowResourcePlanDesc getShowResourcePlanDesc() { return showResourcePlanDesc; } - public void setShowResourcePlanDesc(ShowResourcePlanDesc showResourcePlanDesc) { - this.showResourcePlanDesc = showResourcePlanDesc; - } - public DropResourcePlanDesc getDropResourcePlanDesc() { return dropResourcePlanDesc; } - public void setDropResourcePlanDesc(DropResourcePlanDesc dropResourcePlanDesc) { - this.dropResourcePlanDesc = dropResourcePlanDesc; - } - public AlterResourcePlanDesc getAlterResourcePlanDesc() { return alterResourcePlanDesc; } - public void setAlterResourcePlanDesc(AlterResourcePlanDesc alterResourcePlanDesc) { - this.alterResourcePlanDesc = alterResourcePlanDesc; - } - public CreateWMTriggerDesc getCreateWMTriggerDesc() { return createWMTriggerDesc; } - public void setCreateWMTriggerDesc(CreateWMTriggerDesc createWMTriggerDesc) { - this.createWMTriggerDesc = createWMTriggerDesc; - } - public AlterWMTriggerDesc getAlterWMTriggerDesc() { return alterWMTriggerDesc; } - public void setAlterWMTriggerDesc(AlterWMTriggerDesc alterWMTriggerDesc) { - this.alterWMTriggerDesc = alterWMTriggerDesc; - } - public DropWMTriggerDesc getDropWMTriggerDesc() { return dropWMTriggerDesc; } - public void setDropWMTriggerDesc(DropWMTriggerDesc dropWMTriggerDesc) { - this.dropWMTriggerDesc = dropWMTriggerDesc; - } - public CreateOrAlterWMPoolDesc getWmPoolDesc() { return wmPoolDesc; } - public void setWmPoolDesc(CreateOrAlterWMPoolDesc wmPoolDesc) { - this.wmPoolDesc = wmPoolDesc; - } - public DropWMPoolDesc getDropWMPoolDesc() { return dropWMPoolDesc; } - public void setDropWMPoolDesc(DropWMPoolDesc dropWMPoolDesc) { - this.dropWMPoolDesc = dropWMPoolDesc; - } - public CreateOrAlterWMMappingDesc getWmMappingDesc() { return wmMappingDesc; } - public void 
setWmMappingDesc(CreateOrAlterWMMappingDesc wmMappingDesc) { - this.wmMappingDesc = wmMappingDesc; - } - public DropWMMappingDesc getDropWMMappingDesc() { return dropWMMappingDesc; } - public void setDropWMMappingDesc(DropWMMappingDesc dropWMMappingDesc) { - this.dropWMMappingDesc = dropWMMappingDesc; - } - public CreateOrDropTriggerToPoolMappingDesc getTriggerToPoolMappingDesc() { return triggerToPoolMappingDesc; } - - public void setTriggerToPoolMappingDesc(CreateOrDropTriggerToPoolMappingDesc triggerToPoolMappingDesc) { - this.triggerToPoolMappingDesc = triggerToPoolMappingDesc; - } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java deleted file mode 100644 index 6bc34ed..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - -/** - * DescDatabaseDesc. 
- * - */ -@Explain(displayName = "Describe Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class DescDatabaseDesc extends DDLDesc implements Serializable { - - private static final long serialVersionUID = 1L; - - String dbName; - String resFile; - boolean isExt; - - /** - * thrift ddl for the result of describe database. - */ - private static final String schema = "db_name,comment,location,owner_name,owner_type,parameters#string:string:string:string:string:string"; - - public DescDatabaseDesc() { - } - - /** - * @param resFile - * @param dbName - * @param isExt - */ - public DescDatabaseDesc(Path resFile, String dbName, boolean isExt) { - this.isExt = isExt; - this.resFile = resFile.toString(); - this.dbName = dbName; - } - - public static String getSchema() { - return schema; - } - - /** - * @return the isExt - */ - public boolean isExt() { - return isExt; - } - - /** - * @param isExt - * the isExt to set - */ - public void setExt(boolean isExt) { - this.isExt = isExt; - } - - /** - * @return the tableName - */ - @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return dbName; - } - - /** - * @param db - * the database name to set - */ - public void setDatabaseName(String db) { - this.dbName = db; - } - - /** - * @return the resFile - */ - @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) - public String getResFile() { - return resFile; - } - - /** - * @param resFile - * the resFile to set - */ - public void setResFile(String resFile) { - this.resFile = resFile; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java deleted file mode 100644 index deaa7cd..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under 
one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -/** - * DropDatabaseDesc. - * - */ -@Explain(displayName = "Drop Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class DropDatabaseDesc extends DDLDesc implements Serializable { - private static final long serialVersionUID = 1L; - - String databaseName; - boolean ifExists; - boolean cascade; - ReplicationSpec replicationSpec; - - public DropDatabaseDesc(String databaseName, boolean ifExists, - ReplicationSpec replicationSpec) { - this(databaseName, ifExists, false, replicationSpec); - } - - public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, - ReplicationSpec replicationSpec) { - super(); - this.databaseName = databaseName; - this.ifExists = ifExists; - this.cascade = cascade; - this.replicationSpec = replicationSpec; - } - - @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return databaseName; - } - - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; - } 
- - @Explain(displayName = "if exists") - public boolean getIfExists() { - return ifExists; - } - - public void setIfExists(boolean ifExists) { - this.ifExists = ifExists; - } - - public boolean isCasdade() { - return cascade; - } - - public void setIsCascade(boolean cascade) { - this.cascade = cascade; - } - - public ReplicationSpec getReplicationSpec() { - return replicationSpec; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java deleted file mode 100644 index 08ce59e..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -/** - * LockDatabaseDesc. 
- * - */ -@Explain(displayName = "Lock Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class LockDatabaseDesc extends DDLDesc implements Serializable { - private static final long serialVersionUID = 1L; - - private String databaseName; - private String mode; - private String queryId; - private String queryStr; - - public LockDatabaseDesc() { - } - - public LockDatabaseDesc(String databaseName, String mode, String queryId) { - this.databaseName = databaseName; - this.mode = mode; - this.queryId = queryId; - } - - @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return databaseName; - } - - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; - } - - public void setMode(String mode) { - this.mode = mode; - } - - public String getMode() { - return mode; - } - - public String getQueryId() { - return queryId; - } - - public void setQueryId(String queryId) { - this.queryId = queryId; - } - - public String getQueryStr() { - return queryStr; - } - - public void setQueryStr(String queryStr) { - this.queryStr = queryStr; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java deleted file mode 100644 index 8724c74..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -/** - * ShowDatabasesDesc. - * - */ -@Explain(displayName = "Show Databases", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class ShowDatabasesDesc extends DDLDesc implements Serializable { - private static final long serialVersionUID = 1L; - String pattern; - String resFile; - - /** - * table name for the result of show databases. - */ - private static final String table = "show_databases"; - - /** - * thrift ddl for the result of show databases. 
- */ - private static final String schema = "database_name#string"; - - public String getTable() { - return table; - } - - public String getSchema() { - return schema; - } - - public ShowDatabasesDesc() { - } - - /** - * @param resFile - */ - public ShowDatabasesDesc(Path resFile) { - this.resFile = resFile.toString(); - pattern = null; - } - - /** - * @param pattern - * names of databases to show - */ - public ShowDatabasesDesc(Path resFile, String pattern) { - this.resFile = resFile.toString(); - this.pattern = pattern; - } - - /** - * @return the pattern - */ - @Explain(displayName = "pattern") - public String getPattern() { - return pattern; - } - - /** - * @param pattern - * the pattern to set - */ - public void setPattern(String pattern) { - this.pattern = pattern; - } - - /** - * @return the resFile - */ - @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) - public String getResFile() { - return resFile; - } - - /** - * @param resFile - * the resFile to set - */ - public void setResFile(String resFile) { - this.resFile = resFile; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java deleted file mode 100644 index da7d200..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -/** - * SwitchDatabaseDesc. - * - */ -@Explain(displayName = "Switch Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class SwitchDatabaseDesc extends DDLDesc implements Serializable { - - private static final long serialVersionUID = 1L; - - String databaseName; - - public SwitchDatabaseDesc() { - } - - public SwitchDatabaseDesc(String databaseName) { - super(); - this.databaseName = databaseName; - } - - @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return databaseName; - } - - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java deleted file mode 100644 index a1cb797..0000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; -import org.apache.hadoop.hive.ql.plan.Explain.Level; - - -/** - * UnlockDatabaseDesc. - * - */ -@Explain(displayName = "Unlock Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class UnlockDatabaseDesc extends DDLDesc implements Serializable { - private static final long serialVersionUID = 1L; - - private String databaseName; - - public UnlockDatabaseDesc(String databaseName) { - this.databaseName = databaseName; - } - - @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getDatabaseName() { - return databaseName; - } - - public void setDatabaseName(String databaseName) { - this.databaseName = databaseName; - } - -} diff --git a/ql/src/test/results/clientnegative/database_create_already_exists.q.out b/ql/src/test/results/clientnegative/database_create_already_exists.q.out index 16f45ba..ed9528a 100644 --- a/ql/src/test/results/clientnegative/database_create_already_exists.q.out +++ b/ql/src/test/results/clientnegative/database_create_already_exists.q.out @@ -12,4 +12,4 @@ POSTHOOK: Output: database:test_db PREHOOK: query: CREATE DATABASE test_db PREHOOK: type: CREATEDATABASE PREHOOK: Output: database:test_db -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database test_db already exists +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. 
Database test_db already exists diff --git a/ql/src/test/results/clientnegative/database_create_invalid_name.q.out b/ql/src/test/results/clientnegative/database_create_invalid_name.q.out index 7a765e1..8dd768c 100644 --- a/ql/src/test/results/clientnegative/database_create_invalid_name.q.out +++ b/ql/src/test/results/clientnegative/database_create_invalid_name.q.out @@ -6,4 +6,4 @@ default PREHOOK: query: CREATE DATABASE `test.db` PREHOOK: type: CREATEDATABASE PREHOOK: Output: database:test.db -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:test.db is not a valid database name) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. InvalidObjectException(message:test.db is not a valid database name) diff --git a/ql/src/test/results/clientnegative/database_drop_not_empty.q.out b/ql/src/test/results/clientnegative/database_drop_not_empty.q.out index 5758d67..e3317fc 100644 --- a/ql/src/test/results/clientnegative/database_drop_not_empty.q.out +++ b/ql/src/test/results/clientnegative/database_drop_not_empty.q.out @@ -33,4 +33,4 @@ PREHOOK: query: DROP DATABASE test_db PREHOOK: type: DROPDATABASE PREHOOK: Input: database:test_db PREHOOK: Output: database:test_db -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database test_db is not empty. One or more tables exist.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. InvalidOperationException(message:Database test_db is not empty. One or more tables exist.) 
diff --git a/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out b/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out index 5034977..da1fb90 100644 --- a/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out +++ b/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out @@ -33,4 +33,4 @@ PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict PREHOOK: type: DROPDATABASE PREHOOK: Input: database:db_drop_non_empty_restrict PREHOOK: Output: database:db_drop_non_empty_restrict -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty. One or more tables exist.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty. One or more tables exist.) diff --git a/ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out b/ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out index e783251..e9ed9ab 100644 --- a/ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out +++ b/ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out @@ -12,4 +12,4 @@ PREHOOK: query: lock database drop_nodblock shared PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:drop_nodblock PREHOOK: Output: database:drop_nodblock -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. 
Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager diff --git a/ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out b/ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out index d7a39f0..29549b4 100644 --- a/ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out +++ b/ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out @@ -12,4 +12,4 @@ PREHOOK: query: unlock database drop_nodbunlock PREHOOK: type: UNLOCKDATABASE PREHOOK: Input: database:drop_nodbunlock PREHOOK: Output: database:drop_nodbunlock -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager diff --git a/ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out b/ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out index 2c46159..65c0ea0 100644 --- a/ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out +++ b/ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out @@ -59,4 +59,4 @@ PREHOOK: query: unlock database lockneg1 PREHOOK: type: UNLOCKDATABASE PREHOOK: Input: database:lockneg1 PREHOOK: Output: database:lockneg1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database lockneg1 is not locked +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. 
Database lockneg1 is not locked diff --git a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out index fe12d83..60698db 100644 --- a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out +++ b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out @@ -17,4 +17,4 @@ PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg4 PREHOOK: Output: database:lockneg4 Unable to acquire EXPLICIT, SHARED lock lockneg4 after 1 attempts. -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2 diff --git a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out index e5c8f3e..81ecab0 100644 --- a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out +++ b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out @@ -49,4 +49,4 @@ PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg2 PREHOOK: Output: database:lockneg2 Unable to acquire EXPLICIT, EXCLUSIVE lock lockneg2 after 1 attempts. 
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2 diff --git a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out index b73f3ac..39838ba 100644 --- a/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out +++ b/ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out @@ -230,7 +230,7 @@ PREHOOK: query: DROP DATABASE encrypted_db PREHOOK: type: DROPDATABASE PREHOOK: Input: database:encrypted_db PREHOOK: Output: database:encrypted_db -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database encrypted_db is not empty. One or more tables exist.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.ddl.DDLTask2. InvalidOperationException(message:Database encrypted_db is not empty. One or more tables exist.) PREHOOK: query: DROP TABLE encrypted_db_outloc.renamed_encrypted_table_n1 PURGE PREHOOK: type: DROPTABLE PREHOOK: Input: encrypted_db_outloc@renamed_encrypted_table_n1