diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index eea2fcc..88e15db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2448,9 +2448,8 @@ private void analyzeUnlockTable(ASTNode ast)
   private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
-
-    //inputs.add(new ReadEntity(dbName));
-    //outputs.add(new WriteEntity(dbName));
+    inputs.add(new ReadEntity(getDatabase(dbName)));
+    outputs.add(new WriteEntity(getDatabase(dbName), WriteEntity.WriteType.DDL_SHARED));
     LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
         HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
     lockDatabaseDesc.setQueryStr(ctx.getCmd());
@@ -2461,7 +2460,8 @@ private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
   private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
-
+    inputs.add(new ReadEntity(getDatabase(dbName)));
+    outputs.add(new WriteEntity(getDatabase(dbName), WriteEntity.WriteType.DDL_SHARED));
     UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
     DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
     rootTasks.add(TaskFactory.get(work, conf));