diff --git hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
index c534a42..940899e 100644
--- hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
+++ hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
@@ -37,7 +37,11 @@ Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:hbaseDB
+PREHOOK: Output: database:hbaseDB
 POSTHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
 POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:hbaseDB
+POSTHOOK: Output: database:hbaseDB
 Command failed with exit code = -1
 Query returned non-zero code: -1, cause: null
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
index 50734cb..2eba530 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
@@ -93,7 +93,7 @@
   protected FsAction getFsAction(Privilege priv, Path path) {
     switch (priv.getPriv()) {
     case ALL:
-      throw new AuthorizationException("no matching Action for Privilege.All");
+      return FsAction.READ_WRITE;
    case ALTER_DATA:
      return FsAction.WRITE;
    case ALTER_METADATA:
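
Note on the HdfsAuthorizationProvider change above: Privilege.ALL previously aborted authorization outright; it now degrades to the closest HDFS approximation, since FsAction has no single value covering every Hive privilege. A minimal standalone sketch of the resulting mapping (the Priv enum here is a hypothetical stand-in for org.apache.hadoop.hive.ql.security.authorization.Privilege; only the ALL arm comes from this patch, the rest is assumed):

    import org.apache.hadoop.fs.permission.FsAction;

    enum Priv { ALL, ALTER_DATA, ALTER_METADATA, SELECT }

    final class FsActionMappingSketch {
      static FsAction toFsAction(Priv priv) {
        switch (priv) {
        case ALL:
          return FsAction.READ_WRITE; // was: throw AuthorizationException
        case ALTER_DATA:
        case ALTER_METADATA:
          return FsAction.WRITE;
        default:
          return FsAction.READ;       // assumption: read-style privileges map to READ
        }
      }
    }
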
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 86db406..251f977 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
@@ -56,6 +57,7 @@
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
 import org.apache.hadoop.hive.ql.hooks.Hook;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
@@ -492,9 +494,10 @@ public int compile(String command, boolean resetTaskIds) {
         perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
         doAuthorization(sem);
       } catch (AuthorizationException authExp) {
-        errorMessage = "Authorization failed:" + authExp.getMessage()
-            + ". Use show grant to get more details.";
-        console.printError(errorMessage);
+        console.printError("Authorization failed:" + authExp.getMessage()
+            + ". Use SHOW GRANT to get more details.");
+        errorMessage = authExp.getMessage();
+        SQLState = "01542";
         return 403;
       } finally {
         perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
@@ -537,7 +540,10 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
     HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
     if (op != null) {
-      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
+      if (op.equals(HiveOperation.CREATEDATABASE)) {
+        ss.getAuthorizer().authorize(
+            op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
+      } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
           || op.equals(HiveOperation.CREATETABLE)) {
         ss.getAuthorizer().authorize(
             db.getDatabase(SessionState.get().getCurrentDatabase()), null,
@@ -554,6 +560,11 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
       }
       if (outputs != null && outputs.size() > 0) {
         for (WriteEntity write : outputs) {
+          if (write.getType() == Entity.Type.DATABASE) {
+            ss.getAuthorizer().authorize(write.getDatabase(),
+                null, op.getOutputRequiredPrivileges());
+            continue;
+          }
 
           if (write.getType() == WriteEntity.Type.PARTITION) {
             Partition part = db.getPartition(write.getTable(), write
@@ -581,6 +592,9 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
 
       Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
       for (ReadEntity read : inputs) {
+        if (read.getType() == Entity.Type.DATABASE) {
+          continue;
+        }
         Table tbl = read.getTable();
         if ((read.getPartition() != null) || (tbl.isPartitioned())) {
           String tblName = tbl.getTableName();
@@ -650,6 +664,10 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
       // cache the results for table authorization
       Set<String> tableAuthChecked = new HashSet<String>();
       for (ReadEntity read : inputs) {
+        if (read.getType() == Entity.Type.DATABASE) {
+          ss.getAuthorizer().authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
+          continue;
+        }
         Table tbl = read.getTable();
         if (read.getPartition() != null) {
           Partition partition = read.getPartition();
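
The doAuthorization() additions above route database entities to the Database overload of HiveAuthorizationProvider. A sketch of what a provider now receives for DROP DATABASE (the signature is the existing HiveAuthorizationProvider one; the deny-everything policy body is invented for illustration, mirroring the negative tests further down):

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    // Inside a HiveAuthorizationProvider implementation:
    public void authorize(Database db, Privilege[] readRequiredPriv,
        Privilege[] writeRequiredPriv) throws AuthorizationException {
      // For DROP DATABASE, writeRequiredPriv is { Privilege.DROP } and the
      // entity surfaces in hooks as "database:" + db.getName().
      if (writeRequiredPriv != null && writeRequiredPriv.length > 0) {
        // Invented policy: deny every write-privilege request.
        throw new AuthorizationException("No privilege '" + writeRequiredPriv[0]
            + "' found for outputs { database:" + db.getName() + "}");
      }
    }
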
@@ -706,7 +724,7 @@ public QueryPlan getPlan() {
    * partition needs to be locked (in any mode), all its parents should also be locked in
    * SHARED mode.
    **/
-  private List<HiveLockObj> getLockObjects(Table t, Partition p, HiveLockMode mode)
+  private List<HiveLockObj> getLockObjects(Database d, Table t, Partition p, HiveLockMode mode)
       throws SemanticException {
     List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
 
@@ -715,8 +733,13 @@ public QueryPlan getPlan() {
                              String.valueOf(System.currentTimeMillis()),
                              "IMPLICIT",
                              plan.getQueryStr());
 
+    if (d != null) {
+      locks.add(new HiveLockObj(new HiveLockObject(d.getName(), lockData), mode));
+      return locks;
+    }
+
     if (t != null) {
+      locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
       locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
       mode = HiveLockMode.SHARED;
       locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
@@ -724,6 +747,7 @@ public QueryPlan getPlan() {
     }
 
     if (p != null) {
+      locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
       if (!(p instanceof DummyPartition)) {
         locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
       }
@@ -763,6 +787,7 @@ public QueryPlan getPlan() {
       locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
       locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
     }
+
     return locks;
   }
@@ -810,24 +835,29 @@ public int acquireReadWriteLocks() {
       // If a lock needs to be acquired on any partition, a read lock needs to be acquired on all
       // its parents also
       for (ReadEntity input : plan.getInputs()) {
-        if (input.getType() == ReadEntity.Type.TABLE) {
-          lockObjects.addAll(getLockObjects(input.getTable(), null, HiveLockMode.SHARED));
+        if (input.getType() == ReadEntity.Type.DATABASE) {
+          lockObjects.addAll(getLockObjects(input.getDatabase(), null, null, HiveLockMode.SHARED));
+        } else if (input.getType() == ReadEntity.Type.TABLE) {
+          lockObjects.addAll(getLockObjects(null, input.getTable(), null, HiveLockMode.SHARED));
         } else {
-          lockObjects.addAll(getLockObjects(null, input.getPartition(), HiveLockMode.SHARED));
+          lockObjects.addAll(getLockObjects(null, null, input.getPartition(), HiveLockMode.SHARED));
         }
       }
 
       for (WriteEntity output : plan.getOutputs()) {
         List<HiveLockObj> lockObj = null;
-        if (output.getTyp() == WriteEntity.Type.TABLE) {
-          lockObj = getLockObjects(output.getTable(), null,
+        if (output.getType() == WriteEntity.Type.DATABASE) {
+          lockObjects.addAll(getLockObjects(output.getDatabase(), null, null,
+              output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED));
+        } else if (output.getTyp() == WriteEntity.Type.TABLE) {
+          lockObj = getLockObjects(null, output.getTable(), null,
              output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED);
        } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
-          lockObj = getLockObjects(null, output.getPartition(), HiveLockMode.EXCLUSIVE);
+          lockObj = getLockObjects(null, null, output.getPartition(), HiveLockMode.EXCLUSIVE);
        }
        // In case of dynamic queries, it is possible to have incomplete dummy partitions
        else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
-          lockObj = getLockObjects(null, output.getPartition(), HiveLockMode.SHARED);
+          lockObj = getLockObjects(null, null, output.getPartition(), HiveLockMode.SHARED);
        }
 
        if(lockObj != null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index d32be59..41df473 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -130,6 +130,7 @@
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.GrantDesc;
 import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
+import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionSpec;
@@ -152,6 +153,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
@@ -233,6 +235,16 @@ public int execute(DriverContext driverContext) {
         return dropDatabase(db, dropDatabaseDesc);
       }
 
+      LockDatabaseDesc lockDatabaseDesc = work.getLockDatabaseDesc();
+      if (lockDatabaseDesc != null) {
+        return lockDatabase(lockDatabaseDesc);
+      }
+
+      UnlockDatabaseDesc unlockDatabaseDesc = work.getUnlockDatabaseDesc();
+      if (unlockDatabaseDesc != null) {
+        return unlockDatabase(unlockDatabaseDesc);
+      }
+
       SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc();
       if (switchDatabaseDesc != null) {
         return switchDatabase(db, switchDatabaseDesc);
@@ -2468,7 +2480,7 @@ private int lockTable(LockTableDesc lockTbl) throws HiveException {
     HiveLockMode mode = HiveLockMode.valueOf(lockTbl.getMode());
     String tabName = lockTbl.getTableName();
-    Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+    Table tbl = db.getTable(tabName);
     if (tbl == null) {
       throw new HiveException("Table " + tabName + " does not exist ");
     }
@@ -2499,6 +2511,78 @@ private int lockTable(LockTableDesc lockTbl) throws HiveException {
     return 0;
   }
 
+  /**
+   * Lock the database.
+   *
+   * @param lockDb
+   *          the database to be locked along with the mode
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int lockDatabase(LockDatabaseDesc lockDb) throws HiveException {
+    Context ctx = driverContext.getCtx();
+    HiveLockManager lockMgr = ctx.getHiveLockMgr();
+    if (lockMgr == null) {
+      throw new HiveException("lock Database LockManager not specified");
+    }
+
+    HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode());
+    String dbName = lockDb.getDatabaseName();
+
+    Database dbObj = db.getDatabase(dbName);
+    if (dbObj == null) {
+      throw new HiveException("Database " + dbName + " does not exist ");
+    }
+
+    HiveLockObjectData lockData =
+        new HiveLockObjectData(lockDb.getQueryId(),
+            String.valueOf(System.currentTimeMillis()),
+            "EXPLICIT", lockDb.getQueryStr());
+
+    HiveLock lck = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true);
+    if (lck == null) {
+      return 1;
+    }
+    return 0;
+  }
+
+  /**
+   * Unlock the database specified.
+   *
+   * @param unlockDb
+   *          the database to be unlocked
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int unlockDatabase(UnlockDatabaseDesc unlockDb) throws HiveException {
+    Context ctx = driverContext.getCtx();
+    HiveLockManager lockMgr = ctx.getHiveLockMgr();
+    if (lockMgr == null) {
+      throw new HiveException("unlock Database LockManager not specified");
+    }
+
+    String dbName = unlockDb.getDatabaseName();
+
+    Database dbObj = db.getDatabase(dbName);
+    if (dbObj == null) {
+      throw new HiveException("Database " + dbName + " does not exist ");
+    }
+    HiveLockObject obj = new HiveLockObject(dbObj.getName(), null);
+
+    List<HiveLock> locks = lockMgr.getLocks(obj, false, false);
+    if ((locks == null) || (locks.isEmpty())) {
+      throw new HiveException("Database " + dbName + " is not locked ");
+    }
+
+    for (HiveLock lock : locks) {
+      lockMgr.unlock(lock);
+    }
+    return 0;
+  }
+
   private HiveLockObject getHiveObject(String tabName, Map<String, String> partSpec)
       throws HiveException {
     Table tbl = db.getTable(tabName);
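
For reference, the statements these two DDLTask methods implement can be driven end to end like this (a sketch; it assumes a session with concurrency enabled and a configured lock manager, e.g. hive.support.concurrency=true with the ZooKeeper lock manager, and ignores the returned response objects):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class LockDatabaseExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        SessionState.start(new SessionState(conf));
        Driver driver = new Driver(conf);

        driver.run("create database demo_db");
        driver.run("lock database demo_db shared");   // DDLTask.lockDatabase()
        driver.run("show locks database demo_db");
        driver.run("unlock database demo_db");        // DDLTask.unlockDatabase()
      }
    }
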
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 87f7146..5cb492f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -284,7 +284,7 @@ public int execute(DriverContext driverContext) {
         db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable()
             .getTableName(), tbd.getReplace(), tbd.getHoldDDLTime());
         if (work.getOutputs() != null) {
-          work.getOutputs().add(new WriteEntity(table, true));
+          work.getOutputs().add(new WriteEntity(table));
         }
       } else {
         LOG.info("Partition is: " + tbd.getPartitionSpec().toString());
@@ -376,7 +376,7 @@ public int execute(DriverContext driverContext) {
             updatePartitionBucketSortColumns(table, partn, bucketCols,
                 numBuckets, sortCols);
           }
 
-          WriteEntity enty = new WriteEntity(partn, true);
+          WriteEntity enty = new WriteEntity(partn);
           if (work.getOutputs() != null) {
             work.getOutputs().add(enty);
           }
@@ -417,7 +417,7 @@ public int execute(DriverContext driverContext) {
           dc = new DataContainer(table.getTTable(), partn.getTPartition());
           // add this partition to post-execution hook
           if (work.getOutputs() != null) {
-            work.getOutputs().add(new WriteEntity(partn, true));
+            work.getOutputs().add(new WriteEntity(partn));
           }
         }
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index d1e25d7..1787ef7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -22,11 +22,11 @@
 import java.net.URI;
 import java.util.Map;
 
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.DummyPartition;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 
 /**
  * This class encapsulates an object that is being read or written to by the
@@ -40,8 +45,13 @@
   /**
    * The type of the entity.
    */
   public static enum Type {
-    TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
-  };
+    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
+  }
+
+  /**
+   * The database if this is a database.
+   */
+  private Database database;
 
   /**
    * The type.
@@ -75,12 +80,34 @@
    */
   private boolean complete;
 
+  private Privilege[] inputRequiredPrivileges;
+
+  private Privilege[] outputRequiredPrivileges;
+
+  public void setInputRequiredPrivileges(Privilege[] inputRequiredPrivileges) {
+    this.inputRequiredPrivileges =
+        inputRequiredPrivileges == null ? new Privilege[0] : inputRequiredPrivileges;
+  }
+
+  public void setOutputRequiredPrivileges(Privilege[] outputRequiredPrivileges) {
+    this.outputRequiredPrivileges =
+        outputRequiredPrivileges == null ? new Privilege[0] : outputRequiredPrivileges;
+  }
+
+  public Privilege[] getInputRequiredPrivileges() {
+    return inputRequiredPrivileges;
+  }
+
+  public Privilege[] getOutputRequiredPrivileges() {
+    return outputRequiredPrivileges;
+  }
+
   public boolean isComplete() {
     return complete;
   }
 
   public void setComplete(boolean complete) {
-    this.complete = complete;;
+    this.complete = complete;
   }
 
   public String getName() {
@@ -91,6 +118,14 @@
   public void setName(String name) {
     this.name = name;
   }
 
+  public Database getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(Database database) {
+    this.database = database;
+  }
+
   public Type getTyp() {
     return typ;
   }
@@ -130,15 +165,26 @@ public Entity() {
   }
 
   /**
+   * Constructor for a database.
+   *
+   * @param database
+   *          Database that is read or written to.
+   * @param complete
+   *          Means the database itself is the target, not a table or partition inside it.
+   */
+  public Entity(Database database, boolean complete) {
+    this.database = database;
+    this.typ = Type.DATABASE;
+    this.name = computeName();
+    this.complete = complete;
+  }
+
+  /**
    * Constructor for a table.
    *
    * @param t
    *          Table that is read or written to.
    */
-  public Entity(Table t) {
-    this(t, true);
-  }
-
   public Entity(Table t, boolean complete) {
     d = null;
     p = null;
@@ -154,10 +200,6 @@ public Entity(Table t, boolean complete) {
    * @param p
    *          Partition that is read or written to.
    */
-  public Entity(Partition p) {
-    this(p, true);
-  }
-
   public Entity(Partition p, boolean complete) {
     d = null;
     this.p = p;
@@ -176,18 +218,6 @@ public Entity(DummyPartition p, boolean complete) {
     this.complete = complete;
   }
 
-  /**
-   * Constructor for a file.
-   *
-   * @param d
-   *          The name of the directory that is being read or written to.
-   * @param islocal
-   *          Flag to decide whether this directory is local or in dfs.
-   */
-  public Entity(String d, boolean islocal) {
-    this(d, islocal, true);
-  }
-
   public Entity(String d, boolean islocal, boolean complete) {
     this.d = d;
     p = null;
@@ -223,6 +253,10 @@ public Type getType() {
   /**
    * Get the location of the entity.
    */
   public URI getLocation() throws Exception {
+    if (typ == Type.DATABASE) {
+      return null;
+    }
+
     if (typ == Type.TABLE) {
       return t.getDataLocation();
     }
@@ -262,6 +296,8 @@ public String toString() {
 
   private String computeName() {
     switch (typ) {
+    case DATABASE:
+      return "database:" + database.getName();
     case TABLE:
       return t.getDbName() + "@" + t.getTableName();
     case PARTITION:
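
With the DATABASE entity type, pre/post-execution hooks start seeing database inputs and outputs (hence the new "database:..." lines in the .q.out files further down). A sketch of how a hook might branch on the new type, using only accessors from this patch:

    import org.apache.hadoop.hive.ql.hooks.Entity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    final class HookSketch {
      static String describe(WriteEntity e) {
        if (e.getType() == Entity.Type.DATABASE) {
          // getName() renders this as "database:" + name, per computeName() above.
          return "database output: " + e.getDatabase().getName();
        }
        return "output: " + e.getName();
      }
    }
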
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 555faca..9316a82 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -22,6 +22,7 @@
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
@@ -46,13 +47,20 @@ public ReadEntity() {
   }
 
   /**
+   * Constructor for a database.
+   */
+  public ReadEntity(Database database) {
+    super(database, true);
+  }
+
+  /**
    * Constructor.
    *
    * @param t
    *          The Table that the query reads from.
    */
   public ReadEntity(Table t) {
-    super(t);
+    super(t, true);
   }
 
   private void initParent(ReadEntity parent) {
@@ -62,7 +70,7 @@ private void initParent(ReadEntity parent) {
   }
 
   public ReadEntity(Table t, ReadEntity parent) {
-    super(t);
+    super(t, true);
     initParent(parent);
   }
 
@@ -73,11 +81,11 @@ public ReadEntity(Table t, ReadEntity parent) {
    *          The partition that the query reads from.
    */
   public ReadEntity(Partition p) {
-    super(p);
+    super(p, true);
   }
 
   public ReadEntity(Partition p, ReadEntity parent) {
-    super(p);
+    super(p, true);
     initParent(parent);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index f745305..0493302 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -20,6 +20,8 @@
 
 import java.io.Serializable;
 
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.DummyPartition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -37,6 +39,10 @@ public WriteEntity() {
     super();
   }
 
+  public WriteEntity(Database database) {
+    super(database, true);
+  }
+
   /**
    * Constructor for a table.
    *
@@ -44,7 +50,7 @@ public WriteEntity() {
    *          Table that is written to.
    */
   public WriteEntity(Table t) {
-    this(t, true);
+    super(t, true);
   }
 
   public WriteEntity(Table t, boolean complete) {
@@ -58,11 +64,7 @@ public WriteEntity(Table t, boolean complete) {
    *          Partition that is written to.
    */
   public WriteEntity(Partition p) {
-    this(p, true);
-  }
-
-  public WriteEntity(Partition p, boolean complete) {
-    super(p, complete);
+    super(p, true);
   }
 
   public WriteEntity(DummyPartition p, boolean complete) {
@@ -77,12 +79,8 @@ public WriteEntity(DummyPartition p, boolean complete) {
    * @param islocal
    *          Flag to decide whether this directory is local or in dfs.
    */
-  public WriteEntity(String d, boolean islocal) {
-    this(d, islocal, true);
-  }
-
-  public WriteEntity(String d, boolean islocal, boolean complete) {
-    super(d, islocal, complete);
+  public WriteEntity(Path d, boolean islocal) {
+    super(d.toString(), islocal, true);
   }
 
   /**
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index da80d81..cbe7091 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -39,6 +39,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.Context;
@@ -67,6 +68,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -77,7 +79,6 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -1243,4 +1244,83 @@ private static String normalizeDateCol(
     }
     return partitionDateFormat.format(value);
   }
+
+  protected Database getDatabase(String dbName) throws SemanticException {
+    return getDatabase(dbName, true);
+  }
+
+  protected Database getDatabase(String dbName, boolean throwException) throws SemanticException {
+    try {
+      Database database = db.getDatabase(dbName);
+      if (database == null && throwException) {
+        throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName));
+      }
+      return database;
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName), e);
+    }
+  }
+
+  protected Table getTable(String tblName) throws SemanticException {
+    return getTable(null, tblName, true);
+  }
+
+  protected Table getTable(String tblName, boolean throwException) throws SemanticException {
+    String currentDb = SessionState.get().getCurrentDatabase();
+    return getTable(currentDb, tblName, throwException);
+  }
+
+  protected Table getTableWithQN(String qnName, boolean throwException) throws SemanticException {
+    int dot = qnName.indexOf('.');
+    if (dot < 0) {
+      String currentDb = SessionState.get().getCurrentDatabase();
+      return getTable(currentDb, qnName, throwException);
+    }
+    return getTable(qnName.substring(0, dot), qnName.substring(dot + 1), throwException);
+  }
+
+  protected Table getTable(String database, String tblName, boolean throwException)
+      throws SemanticException {
+    try {
+      Table tab = database == null ? db.getTable(tblName, false)
+          : db.getTable(database, tblName, false);
+      if (tab == null && throwException) {
+        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+      }
+      return tab;
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e);
+    }
+  }
+
+  protected Partition getPartition(Table table, Map<String, String> partSpec,
+      boolean throwException) throws SemanticException {
+    try {
+      Partition partition = db.getPartition(table, partSpec, false);
+      if (partition == null && throwException) {
+        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+      }
+      return partition;
+    } catch (HiveException e) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+    }
+  }
+
+  protected List<Partition> getPartitions(Table table, Map<String, String> partSpec,
+      boolean throwException) throws SemanticException {
+    try {
+      List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
+          db.getPartitions(table, partSpec);
+      if (partitions.isEmpty() && throwException) {
+        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+      }
+      return partitions;
+    } catch (HiveException e) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+    }
+  }
+
+  protected String toMessage(ErrorMsg message, Object detail) {
+    return detail == null ? message.getMsg() : message.getMsg(detail.toString());
+  }
 }
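
A sketch of how subclass analyzers use these hoisted helpers; the throwException=false variants return null instead of raising, which analyzeDropDatabase() below leans on for IF EXISTS (the method shown here is hypothetical, for illustration only):

    // Hypothetical analyzer method, to show the helper contract only.
    protected void analyzeExample(String dbName, String qualifiedName)
        throws SemanticException {
      Database database = getDatabase(dbName, false);   // null when absent
      if (database == null) {
        return;                                         // e.g. the IF EXISTS path
      }
      Table tab = getTableWithQN(qualifiedName, true);  // throws INVALID_TABLE when absent
      inputs.add(new ReadEntity(tab));
      outputs.add(new WriteEntity(database));
    }
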
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 7443ea4..472f5c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -95,6 +96,7 @@
 import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
@@ -121,6 +123,7 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
@@ -298,6 +301,10 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowLocks(ast);
       break;
+    case HiveParser.TOK_SHOWDBLOCKS:
+      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      analyzeShowDbLocks(ast);
+      break;
     case HiveParser.TOK_DESCFUNCTION:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeDescFunction(ast);
@@ -394,6 +401,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     case HiveParser.TOK_UNLOCKTABLE:
       analyzeUnlockTable(ast);
       break;
+    case HiveParser.TOK_LOCKDB:
+      analyzeLockDatabase(ast);
+      break;
+    case HiveParser.TOK_UNLOCKDB:
+      analyzeUnlockDatabase(ast);
+      break;
     case HiveParser.TOK_CREATEDATABASE:
       analyzeCreateDatabase(ast);
       break;
@@ -809,6 +822,14 @@ private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
       ifCascade = true;
     }
 
+    Database database = getDatabase(dbName, !ifExists);
+    if (database == null) {
+      return;
+    }
+
+    inputs.add(new ReadEntity(database));
+    outputs.add(new WriteEntity(database));
+
     DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf));
   }
@@ -2274,6 +2295,29 @@ private void analyzeShowLocks(ASTNode ast) throws SemanticException {
     ctx.setNeedLockMgr(true);
   }
 
+  /**
+   * Add the task according to the parsed command tree. This is used for the CLI
+   * command "SHOW LOCKS DATABASE dbName;".
+   *
+   * @param ast
+   *          The parsed command tree.
+   * @throws SemanticException
+   *           Parsing failed
+   */
+  private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
+    boolean isExtended = (ast.getChildCount() > 1);
+    String dbName = stripQuotes(ast.getChild(0).getText());
+
+    ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName,
+        isExtended);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        showLocksDesc), conf));
+    setFetchTask(createFetchTask(showLocksDesc.getSchema()));
+
+    // Need to initialize the lock manager
+    ctx.setNeedLockMgr(true);
+  }
+
   /**
    * Add the task according to the parsed command tree. This is used for the CLI
    * command "LOCK TABLE ..;".
@@ -2335,6 +2379,30 @@ private void analyzeUnlockTable(ASTNode ast)
     ctx.setNeedLockMgr(true);
   }
 
+  private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
+    String dbName = unescapeIdentifier(ast.getChild(0).getText());
+    String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
+
+    //inputs.add(new ReadEntity(dbName));
+    //outputs.add(new WriteEntity(dbName));
+    LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
+        HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
+    lockDatabaseDesc.setQueryStr(ctx.getCmd());
+    DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
+    rootTasks.add(TaskFactory.get(work, conf));
+    ctx.setNeedLockMgr(true);
+  }
+
+  private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
+    String dbName = unescapeIdentifier(ast.getChild(0).getText());
+
+    UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
+    DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
+    rootTasks.add(TaskFactory.get(work, conf));
+    // Need to initialize the lock manager
+    ctx.setNeedLockMgr(true);
+  }
+
@@ -2531,7 +2599,7 @@ private void analyzeAlterTableAlterParts(ASTNode ast)
     // check if table exists.
     try {
-      tab = db.getTable(SessionState.get().getCurrentDatabase(), tblName, true);
+      tab = getTable(tblName, true);
       inputs.add(new ReadEntity(tab));
     } catch (HiveException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
     }
@@ -3280,57 +3348,4 @@ private void validateSkewedLocationString(String newLocation) throws SemanticExc
       throw new SemanticException(e);
     }
   }
-
-  private Table getTable(String tblName) throws SemanticException {
-    return getTable(null, tblName, true);
-  }
-
-  private Table getTable(String tblName, boolean throwException) throws SemanticException {
-    return getTable(SessionState.get().getCurrentDatabase(), tblName, throwException);
-  }
-
-  private Table getTable(String database, String tblName, boolean throwException)
-      throws SemanticException {
-    try {
-      Table tab = database == null ? db.getTable(tblName, false)
-          : db.getTable(database, tblName, false);
-      if (tab == null && throwException) {
-        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-      }
-      return tab;
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-    }
-  }
-
-  private Partition getPartition(Table table, Map<String, String> partSpec, boolean throwException)
-      throws SemanticException {
-    try {
-      Partition partition = db.getPartition(table, partSpec, false);
-      if (partition == null && throwException) {
-        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
-      }
-      return partition;
-    } catch (HiveException e) {
-      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
-    }
-  }
-
-  private List<Partition> getPartitions(Table table, Map<String, String> partSpec,
-      boolean throwException) throws SemanticException {
-    try {
-      List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
-          db.getPartitions(table, partSpec);
-      if (partitions.isEmpty() && throwException) {
-        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
-      }
-      return partitions;
-    } catch (HiveException e) {
-      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
-    }
-  }
-
-  private String toMessage(ErrorMsg message, Object detail) {
-    return detail == null ? message.getMsg() : message.getMsg(detail.toString());
-  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index 21e8ad5..0c308a6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -101,10 +101,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
           .getMsg("Exception while writing out the local file"), e);
     }
 
+    Path parentPath = new Path(toURI.toString());
+
     if (ts.tableHandle.isPartitioned()) {
       for (Partition partition : partitions) {
         URI fromURI = partition.getDataLocation();
-        Path toPartPath = new Path(toURI.toString(), partition.getName());
+        Path toPartPath = new Path(parentPath, partition.getName());
         Task<? extends Serializable> rTask = TaskFactory.get(
             new CopyWork(fromURI.toString(), toPartPath.toString(), false),
             conf);
@@ -113,13 +115,12 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       }
     } else {
       URI fromURI = ts.tableHandle.getDataLocation();
-      Path toDataPath = new Path(toURI.toString(), "data");
+      Path toDataPath = new Path(parentPath, "data");
       Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
           fromURI.toString(), toDataPath.toString(), false), conf);
       rootTasks.add(rTask);
       inputs.add(new ReadEntity(ts.tableHandle));
     }
-    outputs.add(new WriteEntity(toURI.toString(),
-        toURI.getScheme().equals("hdfs") ? true : false));
+    outputs.add(new WriteEntity(parentPath, toURI.getScheme().equals("hdfs")));
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 5e5b8cf..b146df6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -164,6 +164,8 @@ TOK_SHOW_TBLPROPERTIES;
 TOK_SHOWLOCKS;
 TOK_LOCKTABLE;
 TOK_UNLOCKTABLE;
+TOK_LOCKDB;
+TOK_UNLOCKDB;
 TOK_SWITCHDATABASE;
 TOK_DROPDATABASE;
 TOK_DROPTABLE;
@@ -273,6 +275,7 @@ TOK_GRANT_ROLE;
 TOK_REVOKE_ROLE;
 TOK_SHOW_ROLE_GRANT;
 TOK_SHOWINDEXES;
+TOK_SHOWDBLOCKS;
 TOK_INDEXCOMMENT;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
@@ -624,6 +627,8 @@ ddlStatement
     | analyzeStatement
     | lockStatement
     | unlockStatement
+    | lockDatabase
+    | unlockDatabase
     | createRoleStatement
     | dropRoleStatement
     | grantPrivileges
@@ -1237,6 +1242,7 @@ showStatement
     -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
     | KW_SHOW KW_TBLPROPERTIES tblName=identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
     | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
+    | KW_SHOW KW_LOCKS KW_DATABASE (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
     | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
     -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
     ;
@@ -1247,6 +1253,12 @@ lockStatement
     : KW_LOCK KW_TABLE tableName partitionSpec? lockMode -> ^(TOK_LOCKTABLE tableName lockMode partitionSpec?)
     ;
 
+lockDatabase
+@init { msgs.push("lock database statement"); }
+@after { msgs.pop(); }
+    : KW_LOCK KW_DATABASE (dbName=Identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
+    ;
+
 lockMode
 @init { msgs.push("lock mode"); }
 @after { msgs.pop(); }
@@ -1259,6 +1271,12 @@ unlockStatement
     : KW_UNLOCK KW_TABLE tableName partitionSpec? -> ^(TOK_UNLOCKTABLE tableName partitionSpec?)
     ;
 
+unlockDatabase
+@init { msgs.push("unlock database statement"); }
+@after { msgs.pop(); }
+    : KW_UNLOCK KW_DATABASE (dbName=Identifier) -> ^(TOK_UNLOCKDB $dbName)
+    ;
+
 createRoleStatement
 @init { msgs.push("create role"); }
 @after { msgs.pop(); }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index e9d9ee7..1bc3450 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -5535,7 +5535,7 @@ private Operator genFileSinkPlan(String dest, QB qb, Operator input)
       table_desc = PlanUtils.getTableDesc(tblDesc, cols, colTypes);
     }
 
-    if (!outputs.add(new WriteEntity(destStr, !isDfsDir))) {
+    if (!outputs.add(new WriteEntity(dest_path, !isDfsDir))) {
       throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
           .getMsg(destStr));
     }
@@ -8515,7 +8515,7 @@ private void setupStats(TableScanDesc tsDesc, QBParseInfo qbp, Table tab, String
     tsDesc.setStatsAggPrefix(k);
 
     // set up WritenEntity for replication
-    outputs.add(new WriteEntity(tab, true));
+    outputs.add(new WriteEntity(tab));
 
     // add WriteEntity for each matching partition
     if (tab.isPartitioned()) {
@@ -8526,7 +8526,7 @@ private void setupStats(TableScanDesc tsDesc, QBParseInfo qbp, Table tab, String
       if (partitions != null) {
         for (Partition partn : partitions) {
           // inputs.add(new ReadEntity(partn)); // is this needed at all?
-          outputs.add(new WriteEntity(partn, true));
+          outputs.add(new WriteEntity(partn));
         }
       }
     }
@@ -9513,7 +9513,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
     // check for existence of table
     if (ifNotExists) {
       try {
-        Table table = db.getTable(tableName, false); // use getTable(final String tableName, boolean
+        Table table = getTableWithQN(tableName, false); // use getTable(final String tableName, boolean
                                                      // throwException) which doesn't throw
                                                      // exception but null if table doesn't exist
         if (table != null) { // table exists
@@ -9669,7 +9669,7 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb)
   private void validateCreateView(CreateViewDesc createVwDesc)
       throws SemanticException {
     try {
-      Table oldView = db.getTable(createVwDesc.getViewName(), false);
+      Table oldView = getTableWithQN(createVwDesc.getViewName(), false);
 
       // ALTER VIEW AS SELECT requires the view must exist
       if (createVwDesc.getIsAlterViewAs() && oldView == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 97454e4..542d59a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -68,6 +68,7 @@
     commandType.put(HiveParser.TOK_SHOWINDEXES, HiveOperation.SHOWINDEXES);
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS);
     commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS);
+    commandType.put(HiveParser.TOK_SHOWDBLOCKS, HiveOperation.SHOWLOCKS);
     commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION);
     commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
     commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
@@ -85,6 +86,8 @@
     commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY);
     commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE);
     commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE);
+    commandType.put(HiveParser.TOK_LOCKDB, HiveOperation.LOCKDB);
+    commandType.put(HiveParser.TOK_UNLOCKDB, HiveOperation.UNLOCKDB);
     commandType.put(HiveParser.TOK_CREATEROLE, HiveOperation.CREATEROLE);
     commandType.put(HiveParser.TOK_DROPROLE, HiveOperation.DROPROLE);
     commandType.put(HiveParser.TOK_GRANT, HiveOperation.GRANT_PRIVILEGE);
@@ -185,6 +188,7 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       case HiveParser.TOK_SHOWPARTITIONS:
       case HiveParser.TOK_SHOWINDEXES:
       case HiveParser.TOK_SHOWLOCKS:
+      case HiveParser.TOK_SHOWDBLOCKS:
       case HiveParser.TOK_CREATEINDEX:
       case HiveParser.TOK_DROPINDEX:
       case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
@@ -194,6 +198,8 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
       case HiveParser.TOK_LOCKTABLE:
      case HiveParser.TOK_UNLOCKTABLE:
+      case HiveParser.TOK_LOCKDB:
+      case HiveParser.TOK_UNLOCKDB:
       case HiveParser.TOK_CREATEROLE:
       case HiveParser.TOK_DROPROLE:
       case HiveParser.TOK_GRANT:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index 060fcdd..409e0a7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -36,6 +36,8 @@
   private CreateDatabaseDesc createDatabaseDesc;
   private SwitchDatabaseDesc switchDatabaseDesc;
   private DropDatabaseDesc dropDatabaseDesc;
+  private LockDatabaseDesc lockDatabaseDesc;
+  private UnlockDatabaseDesc unlockDatabaseDesc;
   private CreateTableDesc createTblDesc;
   private CreateTableLikeDesc createTblLikeDesc;
   private CreateViewDesc createVwDesc;
@@ -284,6 +286,24 @@ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
   }
 
   /**
+   * @param lockDatabaseDesc
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      LockDatabaseDesc lockDatabaseDesc) {
+    this(inputs, outputs);
+    this.lockDatabaseDesc = lockDatabaseDesc;
+  }
+
+  /**
+   * @param unlockDatabaseDesc
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      UnlockDatabaseDesc unlockDatabaseDesc) {
+    this(inputs, outputs);
+    this.unlockDatabaseDesc = unlockDatabaseDesc;
+  }
+
+  /**
    * @param showFuncsDesc
    */
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
@@ -501,6 +521,22 @@ public void setSwitchDatabaseDesc(SwitchDatabaseDesc switchDatabaseDesc) {
     this.switchDatabaseDesc = switchDatabaseDesc;
   }
 
+  public LockDatabaseDesc getLockDatabaseDesc() {
+    return lockDatabaseDesc;
+  }
+
+  public void setLockDatabaseDesc(LockDatabaseDesc lockDatabaseDesc) {
+    this.lockDatabaseDesc = lockDatabaseDesc;
+  }
+
+  public UnlockDatabaseDesc getUnlockDatabaseDesc() {
+    return unlockDatabaseDesc;
+  }
+
+  public void setUnlockDatabaseDesc(UnlockDatabaseDesc unlockDatabaseDesc) {
+    this.unlockDatabaseDesc = unlockDatabaseDesc;
+  }
+
   /**
    * @return the createTblDesc
    */
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 69cda05..bfd6b77 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -25,9 +25,11 @@
   LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}),
   EXPORT("EXPORT", new Privilege[]{Privilege.SELECT}, null),
   IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
-  CREATEDATABASE("CREATEDATABASE", null, null),
-  DROPDATABASE("DROPDATABASE", null, null),
-  SWITCHDATABASE("SWITCHDATABASE", null, null),
+  CREATEDATABASE("CREATEDATABASE", null, new Privilege[]{Privilege.CREATE}),
+  DROPDATABASE("DROPDATABASE", null, new Privilege[]{Privilege.DROP}),
+  SWITCHDATABASE("SWITCHDATABASE", new Privilege[]{Privilege.SELECT}, null),
+  LOCKDB("LOCKDATABASE", new Privilege[]{Privilege.LOCK}, null),
+  UNLOCKDB("UNLOCKDATABASE", new Privilege[]{Privilege.LOCK}, null),
   DROPTABLE ("DROPTABLE", null, new Privilege[]{Privilege.DROP}),
   DESCTABLE("DESCTABLE", null, null),
   DESCFUNCTION("DESCFUNCTION", null, null),
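
These enum entries are what doAuthorization() consults: for example, DROP DATABASE now demands Privilege.DROP on its outputs. A small sketch of reading them back (the accessors are the ones the Driver diff above already calls):

    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    final class RequiredPrivSketch {
      static void show() {
        HiveOperation op = HiveOperation.DROPDATABASE;
        Privilege[] in = op.getInputRequiredPrivileges();    // null for this op
        Privilege[] out = op.getOutputRequiredPrivileges();  // { Privilege.DROP }
        System.out.println(in == null ? "no input privileges" : in.length + " input privileges");
        System.out.println(out[0]);
      }
    }
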
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java
new file mode 100644
index 0000000..cb66d54
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+/**
+ * LockDatabaseDesc.
+ *
+ */
+@Explain(displayName = "Lock Database")
+public class LockDatabaseDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private String databaseName;
+  private String mode;
+  private String queryId;
+  private String queryStr;
+
+  public LockDatabaseDesc() {
+  }
+
+  public LockDatabaseDesc(String databaseName, String mode, String queryId) {
+    this.databaseName = databaseName;
+    this.mode = mode;
+    this.queryId = queryId;
+  }
+
+  @Explain(displayName = "database")
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public void setMode(String mode) {
+    this.mode = mode;
+  }
+
+  public String getMode() {
+    return mode;
+  }
+
+  public String getQueryId() {
+    return queryId;
+  }
+
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  public String getQueryStr() {
+    return queryStr;
+  }
+
+  public void setQueryStr(String queryStr) {
+    this.queryStr = queryStr;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
index 37bdf37..3eee8de 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
@@ -31,6 +31,7 @@ public class ShowLocksDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String resFile;
+  String dbName;
   String tableName;
   HashMap<String, String> partSpec;
   boolean isExt;
@@ -44,6 +45,10 @@
    */
   private static final String schema = "tab_name,mode#string:string";
 
+  public String getDatabase() {
+    return dbName;
+  }
+
   public String getTable() {
     return table;
   }
@@ -58,6 +63,17 @@ public ShowLocksDesc() {
   /**
    * @param resFile
    */
+  public ShowLocksDesc(Path resFile, String dbName, boolean isExt) {
+    this.resFile = resFile.toString();
+    this.partSpec = null;
+    this.tableName = null;
+    this.isExt = isExt;
+    this.dbName = dbName;
+  }
+
+  /**
+   * @param resFile
+   */
   public ShowLocksDesc(Path resFile, String tableName,
       HashMap<String, String> partSpec, boolean isExt) {
     this.resFile = resFile.toString();
@@ -66,6 +82,14 @@ public ShowLocksDesc(Path resFile, String tableName,
     this.isExt = isExt;
   }
 
+  public String getDbName() {
+    return dbName;
+  }
+
+  public void setDbName(String dbName) {
+    this.dbName = dbName;
+  }
+
   /**
    * @return the tableName
    */
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java
new file mode 100644
index 0000000..5c21aa9
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+/**
+ * UnlockDatabaseDesc.
+ *
+ */
+@Explain(displayName = "Unlock Database")
+public class UnlockDatabaseDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private String databaseName;
+
+  public UnlockDatabaseDesc(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  @Explain(displayName = "database")
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+}
diff --git ql/src/test/queries/clientnegative/authorization_fail_create_db.q ql/src/test/queries/clientnegative/authorization_fail_create_db.q
new file mode 100644
index 0000000..d969e39
--- /dev/null
+++ ql/src/test/queries/clientnegative/authorization_fail_create_db.q
@@ -0,0 +1,5 @@
+set hive.security.authorization.enabled=true;
+
+create database db_to_fail;
+
+
diff --git ql/src/test/queries/clientnegative/authorization_fail_drop_db.q ql/src/test/queries/clientnegative/authorization_fail_drop_db.q
new file mode 100644
index 0000000..87719b0
--- /dev/null
+++ ql/src/test/queries/clientnegative/authorization_fail_drop_db.q
@@ -0,0 +1,5 @@
+set hive.security.authorization.enabled=false;
+create database db_fail_to_drop;
+set hive.security.authorization.enabled=true;
+
+drop database db_fail_to_drop;
diff --git ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q
new file mode 100644
index 0000000..4966f2b
--- /dev/null
+++ ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q
@@ -0,0 +1,17 @@
+create database lockneg1;
+use lockneg1;
+
+create table tstsrcpart like default.srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from default.srcpart where ds='2008-04-08' and hr='11';
+
+lock database lockneg1 shared;
+show locks database lockneg1;
+select count(1) from tstsrcpart where ds='2008-04-08' and hr='11';
+
+unlock database lockneg1;
+show locks database lockneg1;
+lock database lockneg1 exclusive;
+show locks database lockneg1;
+select count(1) from tstsrcpart where ds='2008-04-08' and hr='11';
diff --git ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q
new file mode 100644
index 0000000..1f9ad90
--- /dev/null
+++ ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q
@@ -0,0 +1,6 @@
+set hive.lock.numretries=0;
+
+create database lockneg4;
+
+lock database lockneg4 exclusive;
+lock database lockneg4 shared;
diff --git ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q
new file mode 100644
index 0000000..8cbe310
--- /dev/null
+++ ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q
@@ -0,0 +1,8 @@
+set hive.lock.numretries=0;
+
+create database lockneg9;
+
+lock database lockneg9 shared;
+show locks database lockneg9;
+
+drop database lockneg9;
diff --git ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q
new file mode 100644
index 0000000..4127a6f
--- /dev/null
+++ ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q
@@ -0,0 +1,15 @@
+set hive.lock.numretries=0;
+
+create database lockneg2;
+use lockneg2;
+
+create table tstsrcpart like default.srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from default.srcpart where ds='2008-04-08' and hr='11';
+
+lock table tstsrcpart shared;
+show locks;
+
+lock database lockneg2 exclusive;
+show locks;
diff --git ql/src/test/results/clientnegative/authorization_fail_2.q.out ql/src/test/results/clientnegative/authorization_fail_2.q.out
index 7c1b609..2c03b65 100644
--- ql/src/test/results/clientnegative/authorization_fail_2.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_2.q.out
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table authorization_fail_2 (key int, value string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@authorization_fail_2
-Authorization failed:No privilege 'Create' found for inputs { database:default, table:authorization_fail_2}. Use show grant to get more details.
+Authorization failed:No privilege 'Create' found for inputs { database:default, table:authorization_fail_2}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_3.q.out ql/src/test/results/clientnegative/authorization_fail_3.q.out
index 221fcad..30d66af 100644
--- ql/src/test/results/clientnegative/authorization_fail_3.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_3.q.out
@@ -36,4 +36,4 @@ PREHOOK: query: show grant user hive_test_user on table authorization_fail_3 par
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on table authorization_fail_3 partition (ds='2010')
 POSTHOOK: type: SHOW_GRANT
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_3, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_3, columnName:key}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_4.q.out ql/src/test/results/clientnegative/authorization_fail_4.q.out
index 9e5cbe7..3cc1792 100644
--- ql/src/test/results/clientnegative/authorization_fail_4.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_4.q.out
@@ -76,4 +76,4 @@ principalType	USER
 privilege	Create
#### A masked pattern was here ####
 grantor	hive_test_user
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_4, partitionName:ds=2010, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_4, partitionName:ds=2010, columnName:key}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_5.q.out ql/src/test/results/clientnegative/authorization_fail_5.q.out
index 1b4f189..a83f794 100644
--- ql/src/test/results/clientnegative/authorization_fail_5.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_5.q.out
@@ -127,4 +127,4 @@ principalType	USER
 privilege	Create
#### A masked pattern was here ####
 grantor	hive_test_user
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, partitionName:ds=2010, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, partitionName:ds=2010, columnName:key}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_6.q.out ql/src/test/results/clientnegative/authorization_fail_6.q.out
index 6958194..47f8bd1 100644
--- ql/src/test/results/clientnegative/authorization_fail_6.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_6.q.out
@@ -7,4 +7,4 @@ POSTHOOK: query: -- SORT_BEFORE_DIFF
 create table authorization_part_fail (key int, value string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@authorization_part_fail
-Authorization failed:No privilege 'Alter' found for inputs { database:default, table:authorization_part_fail}. Use show grant to get more details.
+Authorization failed:No privilege 'Alter' found for inputs { database:default, table:authorization_part_fail}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_7.q.out ql/src/test/results/clientnegative/authorization_fail_7.q.out
index b93d7a2..e87b00f 100644
--- ql/src/test/results/clientnegative/authorization_fail_7.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_7.q.out
@@ -43,4 +43,4 @@ PREHOOK: query: drop role hive_test_role_fail
 PREHOOK: type: DROPROLE
 POSTHOOK: query: drop role hive_test_role_fail
 POSTHOOK: type: DROPROLE
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, columnName:key}. Use SHOW GRANT to get more details.
diff --git ql/src/test/results/clientnegative/authorization_fail_create_db.q.out ql/src/test/results/clientnegative/authorization_fail_create_db.q.out
new file mode 100644
index 0000000..f6b25b1
--- /dev/null
+++ ql/src/test/results/clientnegative/authorization_fail_create_db.q.out
@@ -0,0 +1 @@
+Authorization failed:No privilege 'Create' found for outputs { }. Use SHOW GRANT to get more details.
Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out new file mode 100644 index 0000000..da6e47b --- /dev/null +++ ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out @@ -0,0 +1,5 @@ +PREHOOK: query: create database db_fail_to_drop +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database db_fail_to_drop +POSTHOOK: type: CREATEDATABASE +Authorization failed:No privilege 'Drop' found for outputs { database:db_fail_to_drop}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/authorization_part.q.out ql/src/test/results/clientnegative/authorization_part.q.out index c45fd4f..664463e 100644 --- ql/src/test/results/clientnegative/authorization_part.q.out +++ ql/src/test/results/clientnegative/authorization_part.q.out @@ -274,4 +274,4 @@ POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).key EXPRESSION [(s POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).key EXPRESSION [(src_auth)src_auth.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ] -Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_part_fail, partitionName:ds=2010, columnName:value}. Use show grant to get more details. +Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_part_fail, partitionName:ds=2010, columnName:value}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out index 6cb2e95..2f2c0e3 100644 --- ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out +++ ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out @@ -3,7 +3,4 @@ PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES POSTHOOK: type: SHOWDATABASES default -PREHOOK: query: -- Try to drop a database that does not exist -DROP DATABASE does_not_exist -PREHOOK: type: DROPDATABASE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: does_not_exist +FAILED: SemanticException [Error 10072]: Database does not exist: does_not_exist diff --git ql/src/test/results/clientnegative/database_drop_not_empty.q.out ql/src/test/results/clientnegative/database_drop_not_empty.q.out index ce7b6b2..6ccf95a 100644 --- ql/src/test/results/clientnegative/database_drop_not_empty.q.out +++ ql/src/test/results/clientnegative/database_drop_not_empty.q.out @@ -24,4 +24,6 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE test_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_db +PREHOOK: Output: database:test_db FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
InvalidOperationException(message:Database test_db is not empty) diff --git ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out index 492590a..c8275af 100644 --- ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out +++ ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out @@ -24,4 +24,6 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:db_drop_non_empty_restrict +PREHOOK: Output: database:db_drop_non_empty_restrict FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty) diff --git ql/src/test/results/clientnegative/exim_22_export_authfail.q.out ql/src/test/results/clientnegative/exim_22_export_authfail.q.out index cc8988f..1339bbc 100644 --- ql/src/test/results/clientnegative/exim_22_export_authfail.q.out +++ ql/src/test/results/clientnegative/exim_22_export_authfail.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@exim_department #### A masked pattern was here #### -Authorization failed:No privilege 'Select' found for inputs { database:default, table:exim_department}. Use show grant to get more details. +Authorization failed:No privilege 'Select' found for inputs { database:default, table:exim_department}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out index 6260433..22eaac7 100644 --- ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out +++ ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out @@ -39,4 +39,4 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_department}. Use show grant to get more details. +Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_department}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out index e43c4d2..6eee71e 100644 --- ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out +++ ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out @@ -58,4 +58,4 @@ POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_employee -Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_employee}. Use show grant to get more details. +Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_employee}. Use SHOW GRANT to get more details. 
diff --git ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out index db4578f..fb4224c 100644 --- ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out +++ ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out @@ -34,4 +34,4 @@ PREHOOK: query: use importer PREHOOK: type: SWITCHDATABASE POSTHOOK: query: use importer POSTHOOK: type: SWITCHDATABASE -Authorization failed:No privilege 'Create' found for outputs { database:importer}. Use show grant to get more details. +Authorization failed:No privilege 'Create' found for outputs { database:importer}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/join_nonexistent_part.q.out ql/src/test/results/clientnegative/join_nonexistent_part.q.out index 8380d75..c2a85aa 100644 --- ql/src/test/results/clientnegative/join_nonexistent_part.q.out +++ ql/src/test/results/clientnegative/join_nonexistent_part.q.out @@ -1 +1 @@ -Authorization failed:No privilege 'Select' found for inputs { database:default, table:src, columnName:key}. Use show grant to get more details. +Authorization failed:No privilege 'Select' found for inputs { database:default, table:src, columnName:key}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/load_exist_part_authfail.q.out ql/src/test/results/clientnegative/load_exist_part_authfail.q.out index 8ef0341..fbbdd1c 100644 --- ql/src/test/results/clientnegative/load_exist_part_authfail.q.out +++ ql/src/test/results/clientnegative/load_exist_part_authfail.q.out @@ -10,4 +10,4 @@ POSTHOOK: query: alter table hive_test_src add partition (pcol1 = 'test_part') POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Input: default@hive_test_src POSTHOOK: Output: default@hive_test_src@pcol1=test_part -Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use show grant to get more details. +Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/load_nonpart_authfail.q.out ql/src/test/results/clientnegative/load_nonpart_authfail.q.out index ff06d9e..1c364a5 100644 --- ql/src/test/results/clientnegative/load_nonpart_authfail.q.out +++ ql/src/test/results/clientnegative/load_nonpart_authfail.q.out @@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@hive_test_src -Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use show grant to get more details. +Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/load_part_authfail.q.out ql/src/test/results/clientnegative/load_part_authfail.q.out index 98da9b0..afc0aa4 100644 --- ql/src/test/results/clientnegative/load_part_authfail.q.out +++ ql/src/test/results/clientnegative/load_part_authfail.q.out @@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE POSTHOOK: query: create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@hive_test_src -Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. 
Use show grant to get more details. +Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details. diff --git ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out new file mode 100644 index 0000000..b1b773a --- /dev/null +++ ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out @@ -0,0 +1,55 @@ +PREHOOK: query: create database lockneg1 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database lockneg1 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use lockneg1 +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use lockneg1 +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table tstsrcpart like default.srcpart +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table tstsrcpart like default.srcpart +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: lockneg1@tstsrcpart +PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' +PREHOOK: type: QUERY +PREHOOK: Input: default@srcpart +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: lockneg1@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcpart +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: lockneg1@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: lock database lockneg1 shared +PREHOOK: type: LOCKDATABASE +POSTHOOK: query: lock database lockneg1 shared +POSTHOOK: type: LOCKDATABASE +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: show locks database lockneg1 +PREHOOK: type: SHOWLOCKS +POSTHOOK: query: show locks database lockneg1 +POSTHOOK: type: SHOWLOCKS +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select count(1) from tstsrcpart where ds='2008-04-08' and hr='11' +PREHOOK: type: QUERY +PREHOOK: Input: lockneg1@tstsrcpart +PREHOOK: Input: lockneg1@tstsrcpart@ds=2008-04-08/hr=11 +#### A masked pattern was here #### +POSTHOOK: query: select count(1) from tstsrcpart where ds='2008-04-08' and hr='11' +POSTHOOK: type: QUERY +POSTHOOK: Input: lockneg1@tstsrcpart +POSTHOOK: Input: lockneg1@tstsrcpart@ds=2008-04-08/hr=11 +#### A masked pattern was here #### +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE 
[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +500 +PREHOOK: query: unlock database lockneg1 +PREHOOK: type: UNLOCKDATABASE +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database lockneg1 is not locked diff --git ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out new file mode 100644 index 0000000..a9833a8 --- /dev/null +++ ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out @@ -0,0 +1,12 @@ +PREHOOK: query: create database lockneg4 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database lockneg4 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: lock database lockneg4 exclusive +PREHOOK: type: LOCKDATABASE +POSTHOOK: query: lock database lockneg4 exclusive +POSTHOOK: type: LOCKDATABASE +PREHOOK: query: lock database lockneg4 shared +PREHOOK: type: LOCKDATABASE +conflicting lock present for lockneg4 mode SHARED +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask diff --git ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out new file mode 100644 index 0000000..d67365a --- /dev/null +++ ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out @@ -0,0 +1,14 @@ +PREHOOK: query: create database lockneg9 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database lockneg9 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: lock database lockneg9 shared +PREHOOK: type: LOCKDATABASE +POSTHOOK: query: lock database lockneg9 shared +POSTHOOK: type: LOCKDATABASE +PREHOOK: query: show locks database lockneg9 +PREHOOK: type: SHOWLOCKS +POSTHOOK: query: show locks database lockneg9 +POSTHOOK: type: SHOWLOCKS +conflicting lock present for lockneg9 mode EXCLUSIVE +FAILED: Error in acquiring locks: Locks on the underlying objects cannot be acquired. 
retry after some time diff --git ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out new file mode 100644 index 0000000..03218e8 --- /dev/null +++ ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create database lockneg2 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database lockneg2 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use lockneg2 +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use lockneg2 +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table tstsrcpart like default.srcpart +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table tstsrcpart like default.srcpart +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: lockneg2@tstsrcpart +PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' +PREHOOK: type: QUERY +PREHOOK: Input: default@srcpart +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: lockneg2@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11') +select key, value from default.srcpart where ds='2008-04-08' and hr='11' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcpart +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: lockneg2@tstsrcpart@ds=2008-04-08/hr=11 +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: lock table tstsrcpart shared +PREHOOK: type: LOCKTABLE +POSTHOOK: query: lock table tstsrcpart shared +POSTHOOK: type: LOCKTABLE +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: show locks +PREHOOK: type: SHOWLOCKS +POSTHOOK: query: show locks +POSTHOOK: type: SHOWLOCKS +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +lockneg2@tstsrcpart SHARED +PREHOOK: query: lock database lockneg2 exclusive +PREHOOK: type: LOCKDATABASE +conflicting lock present for lockneg2 mode EXCLUSIVE +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask diff --git ql/src/test/results/clientpositive/alter1.q.out ql/src/test/results/clientpositive/alter1.q.out index ae17dcd..c4bca4a 100644 --- ql/src/test/results/clientpositive/alter1.q.out +++ ql/src/test/results/clientpositive/alter1.q.out @@ -363,5 +363,9 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE alter1_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:alter1_db +PREHOOK: Output: database:alter1_db POSTHOOK: query: DROP DATABASE alter1_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:alter1_db +POSTHOOK: Output: database:alter1_db diff --git ql/src/test/results/clientpositive/alter2.q.out 
ql/src/test/results/clientpositive/alter2.q.out index c90a189..acfd993 100644 --- ql/src/test/results/clientpositive/alter2.q.out +++ ql/src/test/results/clientpositive/alter2.q.out @@ -371,5 +371,9 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE alter2_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:alter2_db +PREHOOK: Output: database:alter2_db POSTHOOK: query: DROP DATABASE alter2_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:alter2_db +POSTHOOK: Output: database:alter2_db diff --git ql/src/test/results/clientpositive/alter4.q.out ql/src/test/results/clientpositive/alter4.q.out index ee8dd06..7e9e689 100644 --- ql/src/test/results/clientpositive/alter4.q.out +++ ql/src/test/results/clientpositive/alter4.q.out @@ -109,8 +109,12 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE alter4_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:alter4_db +PREHOOK: Output: database:alter4_db POSTHOOK: query: DROP DATABASE alter4_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:alter4_db +POSTHOOK: Output: database:alter4_db PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES diff --git ql/src/test/results/clientpositive/authorization_5.q.out ql/src/test/results/clientpositive/authorization_5.q.out index 11effa7..60ac8d4 100644 --- ql/src/test/results/clientpositive/authorization_5.q.out +++ ql/src/test/results/clientpositive/authorization_5.q.out @@ -80,5 +80,9 @@ privilege Select grantor hive_test_user PREHOOK: query: DROP DATABASE IF EXISTS test_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_db +PREHOOK: Output: database:test_db POSTHOOK: query: DROP DATABASE IF EXISTS test_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:test_db +POSTHOOK: Output: database:test_db diff --git ql/src/test/results/clientpositive/database.q.out ql/src/test/results/clientpositive/database.q.out index 2c67815..4bd94ff 100644 --- ql/src/test/results/clientpositive/database.q.out +++ ql/src/test/results/clientpositive/database.q.out @@ -38,9 +38,13 @@ test_db PREHOOK: query: -- DROP DROP DATABASE test_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_db +PREHOOK: Output: database:test_db POSTHOOK: query: -- DROP DROP DATABASE test_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:test_db +POSTHOOK: Output: database:test_db PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES @@ -61,9 +65,13 @@ test_db PREHOOK: query: -- DROP IE exists DROP DATABASE IF EXISTS test_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_db +PREHOOK: Output: database:test_db POSTHOOK: query: -- DROP IE exists DROP DATABASE IF EXISTS test_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:test_db +POSTHOOK: Output: database:test_db PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES @@ -305,8 +313,12 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE test_db PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_db +PREHOOK: Output: database:test_db POSTHOOK: query: DROP DATABASE test_db POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:test_db +POSTHOOK: Output: database:test_db PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES @@ -330,8 +342,12 @@ POSTHOOK: query: USE default POSTHOOK: type: SWITCHDATABASE PREHOOK: query: DROP DATABASE to_drop_db1 
CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:to_drop_db1 +PREHOOK: Output: database:to_drop_db1 POSTHOOK: query: DROP DATABASE to_drop_db1 CASCADE POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:to_drop_db1 +POSTHOOK: Output: database:to_drop_db1 PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES @@ -379,8 +395,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ] PREHOOK: query: DROP DATABASE to_drop_db2 CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:to_drop_db2 +PREHOOK: Output: database:to_drop_db2 POSTHOOK: query: DROP DATABASE to_drop_db2 CASCADE POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:to_drop_db2 +POSTHOOK: Output: database:to_drop_db2 POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ] PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES @@ -420,8 +440,12 @@ POSTHOOK: type: SWITCHDATABASE POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ] PREHOOK: query: DROP DATABASE IF EXISTS to_drop_db3 CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:to_drop_db3 +PREHOOK: Output: database:to_drop_db3 POSTHOOK: query: DROP DATABASE IF EXISTS to_drop_db3 CASCADE POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:to_drop_db3 +POSTHOOK: Output: database:to_drop_db3 POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ] PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES @@ -465,8 +489,12 @@ default to_drop_db4 PREHOOK: query: DROP DATABASE to_drop_db4 RESTRICT PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:to_drop_db4 +PREHOOK: Output: database:to_drop_db4 POSTHOOK: query: DROP DATABASE to_drop_db4 RESTRICT POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:to_drop_db4 +POSTHOOK: Output: database:to_drop_db4 POSTHOOK: Lineage: temp_tbl2.c EXPRESSION [(temp_tbl)temp_tbl.null, ] PREHOOK: query: SHOW DATABASES PREHOOK: type: SHOWDATABASES diff --git ql/src/test/results/clientpositive/database_drop.q.out ql/src/test/results/clientpositive/database_drop.q.out index 37b0d31..38279b8 100644 --- ql/src/test/results/clientpositive/database_drop.q.out +++ ql/src/test/results/clientpositive/database_drop.q.out @@ -564,9 +564,13 @@ POSTHOOK: Lineage: db5__temp_tbl_idx1__.id SIMPLE [(temp_tbl)temp_tbl.FieldSchem PREHOOK: query: -- drop the database with cascade DROP DATABASE db5 CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:db5 +PREHOOK: Output: database:db5 POSTHOOK: query: -- drop the database with cascade DROP DATABASE db5 CASCADE POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:db5 +POSTHOOK: Output: database:db5 POSTHOOK: Lineage: db5__part_tab2_idx4__ PARTITION(ds=2008-04-09)._bucketname SIMPLE [(part_tab2)part_tab2.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ] POSTHOOK: Lineage: db5__part_tab2_idx4__ PARTITION(ds=2008-04-09)._offsets EXPRESSION [(part_tab2)part_tab2.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ] POSTHOOK: Lineage: db5__part_tab2_idx4__ PARTITION(ds=2008-04-09).id SIMPLE [(part_tab2)part_tab2.FieldSchema(name:id, type:int, comment:null), ] diff --git ql/src/test/results/clientpositive/describe_database_json.q.out ql/src/test/results/clientpositive/describe_database_json.q.out index 5414469..90bf871 100644 --- ql/src/test/results/clientpositive/describe_database_json.q.out +++ ql/src/test/results/clientpositive/describe_database_json.q.out @@ -24,8 +24,12 @@ POSTHOOK: type: 
SHOWDATABASES {"databases":["jsondb1"]} PREHOOK: query: DROP DATABASE jsondb1 PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:jsondb1 +PREHOOK: Output: database:jsondb1 POSTHOOK: query: DROP DATABASE jsondb1 POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:jsondb1 +POSTHOOK: Output: database:jsondb1 PREHOOK: query: CREATE DATABASE jsondb1 PREHOOK: type: CREATEDATABASE POSTHOOK: query: CREATE DATABASE jsondb1 @@ -42,5 +46,9 @@ POSTHOOK: type: DESCDATABASE #### A masked pattern was here #### PREHOOK: query: DROP DATABASE jsondb1 PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:jsondb1 +PREHOOK: Output: database:jsondb1 POSTHOOK: query: DROP DATABASE jsondb1 POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:jsondb1 +POSTHOOK: Output: database:jsondb1 diff --git ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out index e2f32b5..42c7ab3 100644 --- ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out +++ ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out @@ -54,8 +54,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DROP DATABASE test_database CASCADE PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:test_database +PREHOOK: Output: database:test_database POSTHOOK: query: DROP DATABASE test_database CASCADE POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:test_database +POSTHOOK: Output: database:test_database POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out index 4fbb6fb..388efa4 100644 --- ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out +++ ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out @@ -80,5 +80,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_01_nonpart.q.out ql/src/test/results/clientpositive/exim_01_nonpart.q.out index a13f6e1..0b2a1f8 100644 --- ql/src/test/results/clientpositive/exim_01_nonpart.q.out +++ ql/src/test/results/clientpositive/exim_01_nonpart.q.out @@ -92,5 +92,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out index c5f08f2..99d8a40 100644 --- 
ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out +++ ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out @@ -85,5 +85,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_02_part.q.out ql/src/test/results/clientpositive/exim_02_part.q.out index 667a22f..751cea4 100644 --- ql/src/test/results/clientpositive/exim_02_part.q.out +++ ql/src/test/results/clientpositive/exim_02_part.q.out @@ -110,5 +110,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out index 22c06d8..042a49c 100644 --- ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out +++ ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out @@ -85,5 +85,9 @@ POSTHOOK: Output: importer@exim_department #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_04_all_part.q.out ql/src/test/results/clientpositive/exim_04_all_part.q.out index 612fde0..f3fbe64 100644 --- ql/src/test/results/clientpositive/exim_04_all_part.q.out +++ ql/src/test/results/clientpositive/exim_04_all_part.q.out @@ -170,5 +170,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out index 1977ae1..62663eb 100644 --- ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out +++ ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out @@ -194,5 +194,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_05_some_part.q.out ql/src/test/results/clientpositive/exim_05_some_part.q.out index 0796180..fe251e6 100644 --- ql/src/test/results/clientpositive/exim_05_some_part.q.out +++ ql/src/test/results/clientpositive/exim_05_some_part.q.out @@ -148,5 +148,9 @@ POSTHOOK: Input: importer@exim_employee 
POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_06_one_part.q.out ql/src/test/results/clientpositive/exim_06_one_part.q.out index c3269fe..15c6c2c 100644 --- ql/src/test/results/clientpositive/exim_06_one_part.q.out +++ ql/src/test/results/clientpositive/exim_06_one_part.q.out @@ -137,5 +137,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out index e3682b9..71b8e97 100644 --- ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out +++ ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out @@ -184,5 +184,9 @@ POSTHOOK: Output: importer@exim_employee #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out index c873133..eca4d9e 100644 --- ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out +++ ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out @@ -101,5 +101,9 @@ POSTHOOK: Output: importer@exim_department #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out index 9c3ff9c..fa2ac45 100644 --- ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out +++ ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out @@ -174,5 +174,9 @@ POSTHOOK: Output: importer@exim_employee #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_10_external_managed.q.out ql/src/test/results/clientpositive/exim_10_external_managed.q.out index 27bf602..551b033 100644 --- ql/src/test/results/clientpositive/exim_10_external_managed.q.out +++ ql/src/test/results/clientpositive/exim_10_external_managed.q.out @@ -79,5 +79,9 @@ POSTHOOK: Output: importer@exim_department #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: 
type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_11_managed_external.q.out ql/src/test/results/clientpositive/exim_11_managed_external.q.out index cf6b66a..0d80aac 100644 --- ql/src/test/results/clientpositive/exim_11_managed_external.q.out +++ ql/src/test/results/clientpositive/exim_11_managed_external.q.out @@ -83,5 +83,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_12_external_location.q.out ql/src/test/results/clientpositive/exim_12_external_location.q.out index 36618b4..95909dd 100644 --- ql/src/test/results/clientpositive/exim_12_external_location.q.out +++ ql/src/test/results/clientpositive/exim_12_external_location.q.out @@ -86,5 +86,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_13_managed_location.q.out ql/src/test/results/clientpositive/exim_13_managed_location.q.out index 89a0958..123cfbd 100644 --- ql/src/test/results/clientpositive/exim_13_managed_location.q.out +++ ql/src/test/results/clientpositive/exim_13_managed_location.q.out @@ -86,5 +86,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out index 6e8eda5..77a6e05 100644 --- ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out +++ ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out @@ -98,5 +98,9 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_15_external_part.q.out ql/src/test/results/clientpositive/exim_15_external_part.q.out index 7685af4..2e2cbc2 100644 --- ql/src/test/results/clientpositive/exim_15_external_part.q.out +++ ql/src/test/results/clientpositive/exim_15_external_part.q.out @@ -220,5 +220,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: 
Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_16_part_external.q.out ql/src/test/results/clientpositive/exim_16_part_external.q.out index f0898e5..ca7aaa2 100644 --- ql/src/test/results/clientpositive/exim_16_part_external.q.out +++ ql/src/test/results/clientpositive/exim_16_part_external.q.out @@ -178,5 +178,9 @@ POSTHOOK: Output: importer@exim_employee #### A masked pattern was here #### PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_17_part_managed.q.out ql/src/test/results/clientpositive/exim_17_part_managed.q.out index badd68f..d16211b 100644 --- ql/src/test/results/clientpositive/exim_17_part_managed.q.out +++ ql/src/test/results/clientpositive/exim_17_part_managed.q.out @@ -205,5 +205,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_18_part_external.q.out ql/src/test/results/clientpositive/exim_18_part_external.q.out index 5a79ac7..42dd0fb 100644 --- ql/src/test/results/clientpositive/exim_18_part_external.q.out +++ ql/src/test/results/clientpositive/exim_18_part_external.q.out @@ -172,5 +172,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out index f711b8f..e2e60bf 100644 --- ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out +++ ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out @@ -182,5 +182,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_19_part_external_location.q.out ql/src/test/results/clientpositive/exim_19_part_external_location.q.out index 6b7c5f9..fcd4c13 100644 --- ql/src/test/results/clientpositive/exim_19_part_external_location.q.out +++ ql/src/test/results/clientpositive/exim_19_part_external_location.q.out @@ -176,5 +176,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer 
POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out index 0c15654..52cd141 100644 --- ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out +++ ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out @@ -176,5 +176,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out index dc48f4d..6746a44 100644 --- ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out +++ ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out @@ -81,6 +81,10 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out index 07b6ebe..4e0dfb0 100644 --- ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out +++ ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out @@ -104,5 +104,9 @@ POSTHOOK: Input: importer@exim_employee POSTHOOK: Output: importer@exim_employee PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer diff --git ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out index 980e6a2..70e9385 100644 --- ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out +++ ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out @@ -67,6 +67,10 @@ POSTHOOK: Input: importer@exim_department POSTHOOK: Output: importer@exim_department PREHOOK: query: drop database importer PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:importer +PREHOOK: Output: database:importer POSTHOOK: query: drop database importer POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:importer +POSTHOOK: Output: database:importer #### A masked pattern was here #### diff --git ql/src/test/results/clientpositive/input46.q.out ql/src/test/results/clientpositive/input46.q.out index c0dcc8e..8c49197 100644 --- ql/src/test/results/clientpositive/input46.q.out +++ ql/src/test/results/clientpositive/input46.q.out @@ -28,5 +28,9 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: table_in_database_creation@test4 PREHOOK: query: drop database table_in_database_creation cascade PREHOOK: type: DROPDATABASE 
+PREHOOK: Input: database:table_in_database_creation +PREHOOK: Output: database:table_in_database_creation POSTHOOK: query: drop database table_in_database_creation cascade POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:table_in_database_creation +POSTHOOK: Output: database:table_in_database_creation diff --git ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out index 685629e..e68dc3d 100644 --- ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out +++ ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out @@ -255,16 +255,24 @@ POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).one SIMPLE [(sourcetable POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).two SIMPLE [(sourcetable)sourcetable.FieldSchema(name:two, type:string, comment:null), ] PREHOOK: query: DROP DATABASE db1 PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:db1 +PREHOOK: Output: database:db1 POSTHOOK: query: DROP DATABASE db1 POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:db1 +POSTHOOK: Output: database:db1 POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).one SIMPLE [(sourcetable)sourcetable.FieldSchema(name:one, type:string, comment:null), ] POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).two SIMPLE [(sourcetable)sourcetable.FieldSchema(name:two, type:string, comment:null), ] POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).one SIMPLE [(sourcetable)sourcetable.FieldSchema(name:one, type:string, comment:null), ] POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).two SIMPLE [(sourcetable)sourcetable.FieldSchema(name:two, type:string, comment:null), ] PREHOOK: query: DROP DATABASE db2 PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:db2 +PREHOOK: Output: database:db2 POSTHOOK: query: DROP DATABASE db2 POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:db2 +POSTHOOK: Output: database:db2 POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).one SIMPLE [(sourcetable)sourcetable.FieldSchema(name:one, type:string, comment:null), ] POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).two SIMPLE [(sourcetable)sourcetable.FieldSchema(name:two, type:string, comment:null), ] POSTHOOK: Lineage: destintable PARTITION(ds=2011-11-11).one SIMPLE [(sourcetable)sourcetable.FieldSchema(name:one, type:string, comment:null), ] diff --git ql/src/test/results/clientpositive/show_create_table_db_table.q.out ql/src/test/results/clientpositive/show_create_table_db_table.q.out index 92bdff9..d36e8b0 100644 --- ql/src/test/results/clientpositive/show_create_table_db_table.q.out +++ ql/src/test/results/clientpositive/show_create_table_db_table.q.out @@ -47,5 +47,9 @@ POSTHOOK: type: DROPTABLE POSTHOOK: Output: tmp_feng@tmp_showcrt PREHOOK: query: DROP DATABASE tmp_feng PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:tmp_feng +PREHOOK: Output: database:tmp_feng POSTHOOK: query: DROP DATABASE tmp_feng POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:tmp_feng +POSTHOOK: Output: database:tmp_feng