diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index aeced48..ed71196 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -806,7 +806,8 @@ private static String getFormattedDb(HiveConf conf, CliSessionState ss) {
     if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIPRINTCURRENTDB)) {
       return "";
     }
-    String currDb = ss.getCurrentDbName();
+    //BUG: This will not work in remote mode - HIVE-5153
+    String currDb = SessionState.get().getCurrentDatabase();
 
     if (currDb == null) {
       return "";
diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
index d11b873..f9d3beb 100644
--- a/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
+++ b/cli/src/java/org/apache/hadoop/hive/cli/CliSessionState.java
@@ -24,8 +24,6 @@ import java.util.Properties;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.service.HiveClient;
 import org.apache.thrift.TException;
@@ -36,7 +34,7 @@
 import org.apache.thrift.transport.TTransportException;
 
 /**
- * CliSessionState.
+ * SessionState for the Hive CLI.
  *
  */
 public class CliSessionState extends SessionState {
@@ -76,8 +74,6 @@
   private TTransport transport;
   private HiveClient client;
 
-  private Hive hive; // currently only used (and init'ed) in getCurrentDbName
-
   public CliSessionState(HiveConf conf) {
     super(conf);
     remoteMode = false;
@@ -106,6 +102,7 @@ public int getPort() {
     return port;
   }
 
+  @Override
   public void close() {
     try {
       super.close();
@@ -128,18 +125,4 @@ public HiveClient getClient() {
     return client;
   }
 
-  /**
-   * Return the name of the current database
-   * @return the name of the current database or, if an error, null
-   */
-  public String getCurrentDbName() {
-    if (hive == null) {
-      try {
-        hive = Hive.get(conf);
-      } catch (HiveException e) {
-        return null;
-      }
-    }
-    return hive.getCurrentDatabase();
-  }
 }
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
index a95e206..63b9371 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -84,10 +85,9 @@ public void testConnect() throws Exception {
    */
   @Test
   public void testgetDbName() throws Exception {
-    HiveConf configuration = new HiveConf();
-    CliSessionState sessionState = new CliSessionState(configuration);
-    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME, sessionState.getCurrentDbName());
-
+    SessionState.start(new HiveConf());
+    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+        SessionState.get().getCurrentDatabase());
   }
 
   /**
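
Usage sketch (not part of the patch): after this change the current database lives on the thread's SessionState, so local and remote CLI paths resolve it the same way. The class name is made up; it assumes a working Hive client configuration.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class CurrentDbSketch {
      public static void main(String[] args) {
        // start() binds a SessionState to the current thread.
        SessionState.start(new HiveConf());
        // getCurrentDatabase() lazily defaults to
        // MetaStoreUtils.DEFAULT_DATABASE_NAME ("default").
        System.out.println(SessionState.get().getCurrentDatabase());
      }
    }
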
diff --git a/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java b/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index 0e011e4..1d4a9a1 100644
--- a/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ b/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.common.ErrorType;
 import org.apache.hcatalog.common.HCatException;
@@ -290,14 +291,14 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive)
 
     ShowTablesDesc showTables = work.getShowTblsDesc();
     if (showTables != null) {
-      String dbName = showTables.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+      String dbName = showTables.getDbName() == null ? SessionState.get().getCurrentDatabase()
         : showTables.getDbName();
       authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
     }
 
     ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
     if (showTableStatus != null) {
-      String dbName = showTableStatus.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+      String dbName = showTableStatus.getDbName() == null ? SessionState.get().getCurrentDatabase()
         : showTableStatus.getDbName();
       authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
     }
@@ -315,7 +316,7 @@
       //this is actually a ALTER TABLE DROP PARITITION statement
       for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
         // partitions are not added as write entries in drop partitions in Hive
-        Table table = hive.getTable(hive.getCurrentDatabase(), dropTable.getTableName());
+        Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropTable.getTableName());
         List<Partition> partitions = null;
         try {
           partitions = hive.getPartitionsByFilter(table, partSpec.toString());
@@ -332,7 +333,8 @@
 
     AlterTableDesc alterTable = work.getAlterTblDesc();
     if (alterTable != null) {
-      Table table = hive.getTable(hive.getCurrentDatabase(), alterTable.getOldName(), false);
+      Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
+          alterTable.getOldName(), false);
 
       Partition part = null;
       if (alterTable.getPartSpec() != null) {
diff --git a/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java b/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
index 3740795..eb40e22 100644
--- a/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
+++ b/hcatalog/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
@@ -47,10 +47,6 @@
 
   private HiveAuthorizationProvider authProvider;
 
-  protected String getDbName(Hive hive, String dbName) {
-    return dbName == null ? hive.getCurrentDatabase() : dbName;
-  }
-
   public HiveAuthorizationProvider getAuthProvider() {
     if (authProvider == null) {
       authProvider = SessionState.get().getAuthorizer();
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index 75837ef..9ef87af 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.ErrorType;
 import org.apache.hive.hcatalog.common.HCatException;
@@ -287,14 +288,14 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive)
 
     ShowTablesDesc showTables = work.getShowTblsDesc();
     if (showTables != null) {
-      String dbName = showTables.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+      String dbName = showTables.getDbName() == null ? SessionState.get().getCurrentDatabase()
         : showTables.getDbName();
       authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
     }
 
     ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
     if (showTableStatus != null) {
-      String dbName = showTableStatus.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+      String dbName = showTableStatus.getDbName() == null ? SessionState.get().getCurrentDatabase()
         : showTableStatus.getDbName();
       authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
     }
@@ -312,14 +313,13 @@
       //this is actually a ALTER TABLE DROP PARITITION statement
       for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
         // partitions are not added as write entries in drop partitions in Hive
-        Table table = hive.getTable(hive.getCurrentDatabase(), dropTable.getTableName());
+        Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropTable.getTableName());
         List<Partition> partitions = null;
         try {
           partitions = hive.getPartitionsByFilter(table, partSpec.toString());
         } catch (Exception e) {
           throw new HiveException(e);
         }
-
         for (Partition part : partitions) {
           authorize(part, Privilege.DROP);
         }
@@ -329,7 +329,8 @@
 
     AlterTableDesc alterTable = work.getAlterTblDesc();
     if (alterTable != null) {
-      Table table = hive.getTable(hive.getCurrentDatabase(), alterTable.getOldName(), false);
+      Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
+          alterTable.getOldName(), false);
 
       Partition part = null;
       if (alterTable.getPartSpec() != null) {
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
index c144135..0184dc0 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
@@ -46,10 +46,6 @@
 
   private HiveAuthorizationProvider authProvider;
 
-  protected String getDbName(Hive hive, String dbName) {
-    return dbName == null ? hive.getCurrentDatabase() : dbName;
-  }
-
   public HiveAuthorizationProvider getAuthProvider() {
     if (authProvider == null) {
       authProvider = SessionState.get().getAuthorizer();
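
Sketch (not part of the patch): both copies of the removed getDbName(Hive, String) helper are effectively inlined at their call sites as a session-based lookup. A hypothetical equivalent:

    // Hypothetical replacement for the removed helper: resolve a
    // possibly-null database name against the session, not the Hive object.
    private static String resolveDbName(String dbName) {
      return dbName == null ? SessionState.get().getCurrentDatabase() : dbName;
    }
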
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 0bc4425..d17b265 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -540,14 +540,14 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
     if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
         || op.equals(HiveOperation.CREATETABLE)) {
       ss.getAuthorizer().authorize(
-          db.getDatabase(db.getCurrentDatabase()), null,
+          db.getDatabase(SessionState.get().getCurrentDatabase()), null,
           HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
     } else {
       if (op.equals(HiveOperation.IMPORT)) {
         ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
         if (!isa.existsTable()) {
           ss.getAuthorizer().authorize(
-              db.getDatabase(db.getCurrentDatabase()), null,
+              db.getDatabase(SessionState.get().getCurrentDatabase()), null,
               HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
         }
       }
@@ -831,14 +831,13 @@ else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
           plan.getQueryStr());
 
       // Lock the database also
-      try {
-        Hive db = Hive.get(conf);
-        lockObjects.add(new HiveLockObj(
-            new HiveLockObject(db.getCurrentDatabase(), lockData),
-            HiveLockMode.SHARED));
-      } catch (HiveException e) {
-        throw new SemanticException(e.getMessage());
-      }
+      String currentDb = SessionState.get().getCurrentDatabase();
+      lockObjects.add(
+          new HiveLockObj(
+              new HiveLockObject(currentDb, lockData),
+              HiveLockMode.SHARED
+          )
+      );
 
       List<HiveLock> hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index 9de7dcf..99b062f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -36,13 +36,12 @@
 import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
-import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -234,7 +233,7 @@ private ColumnStatistics constructColumnStatsFromPackedRow(ObjectInspector oi,
       throw new HiveException("Unexpected object type encountered while unpacking row");
     }
 
-    String dbName = db.getCurrentDatabase();
+    String dbName = SessionState.get().getCurrentDatabase();
     String tableName = work.getColStats().getTableName();
     String partName = null;
     List<String> colName = work.getColStats().getColName();
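
Worth noting in the Driver hunk above: Hive.get(conf) can throw HiveException, while SessionState.get() is a plain ThreadLocal read, which is why the surrounding try/catch disappears. A made-up sketch of that access pattern (SessionHolder is a hypothetical name, matching the thread-local design the Hive class javadoc below describes):

    // Each thread sees its own instance; reading it never touches the
    // metastore, so no checked exception is involved.
    final class SessionHolder {
      private static final ThreadLocal<SessionHolder> HOLDER =
          new ThreadLocal<SessionHolder>() {
            @Override
            protected SessionHolder initialValue() {
              return new SessionHolder();
            }
          };

      static SessionHolder get() {
        return HOLDER.get();
      }
    }
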
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 4dcb260..972bec4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -40,10 +40,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -112,8 +112,9 @@
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
+import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -151,9 +152,9 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -535,7 +536,7 @@ private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
         dbName = dbTab[0];
         tableName = dbTab[1];
       } else {
-        dbName = db.getCurrentDatabase();
+        dbName = SessionState.get().getCurrentDatabase();
         tableName = obj;
       }
       dbObj = db.getDatabase(dbName);
@@ -701,7 +702,7 @@ private int grantOrRevokePrivileges(List<PrincipalDesc> principals,
         dbName = dbTab[0];
         tableName = dbTab[1];
       } else {
-        dbName = db.getCurrentDatabase();
+        dbName = SessionState.get().getCurrentDatabase();
         tableName = obj;
       }
       dbObj = db.getDatabase(dbName);
@@ -874,7 +875,7 @@ private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException {
   }
 
   private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException {
-    db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(),
+    db.dropIndex(SessionState.get().getCurrentDatabase(), dropIdx.getTableName(),
         dropIdx.getIndexName(), true);
     return 0;
   }
@@ -897,7 +898,7 @@
     if (HiveUtils.getIndexHandler(conf, crtIndex.getIndexTypeHandlerClass()).usesIndexTable()) {
       String indexTableName = crtIndex.getIndexTableName() != null ? crtIndex.getIndexTableName() :
-          MetaStoreUtils.getIndexTableName(db.getCurrentDatabase(),
+          MetaStoreUtils.getIndexTableName(SessionState.get().getCurrentDatabase(),
           crtIndex.getTableName(), crtIndex.getIndexName());
       Table indexTable = db.getTable(indexTableName);
       work.getOutputs().add(new WriteEntity(indexTable));
@@ -919,7 +920,10 @@ private int alterIndex(Hive db, AlterIndexDesc alterIndex) throws HiveException {
     try {
       Map<String, String> props = new HashMap<String, String>();
       Map<List<String>, Long> basePartTs = new HashMap<List<String>, Long>();
-      Table baseTbl = db.getTable(db.getCurrentDatabase(), baseTableName);
+
+      Table baseTbl = db.getTable(SessionState.get().getCurrentDatabase(),
+          baseTableName);
+
       if (baseTbl.isPartitioned()) {
         List<Partition> baseParts;
         if (alterIndex.getSpec() != null) {
@@ -3475,7 +3479,7 @@ private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb)
     if (!db.databaseExists(dbName)) {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
-    db.setCurrentDatabase(dbName);
+    SessionState.get().setCurrentDatabase(dbName);
 
     // set database specific parameters
     Database database = db.getDatabase(dbName);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 344c9b1..f668355 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.ql.metadata;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
 import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
@@ -93,10 +92,12 @@
 import com.google.common.collect.Sets;
 
 /**
- * The Hive class contains information about this instance of Hive. An instance
- * of Hive represents a set of data in a file system (usually HDFS) organized
- * for easy query processing
+ * This class has functions that implement metadata/DDL operations using calls
+ * to the metastore.
+ * It has a metastore client instance it uses to communicate with the metastore.
  *
+ * It is a thread local variable, and instances are accessed using the static
+ * get methods in this class.
  */
 public class Hive {
 
@@ -105,7 +106,6 @@
   private HiveConf conf = null;
   private IMetaStoreClient metaStoreClient;
-  private String currentDatabase;
 
   private static ThreadLocal<Hive> hiveDB = new ThreadLocal<Hive>() {
     @Override
@@ -166,9 +166,6 @@ public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
       closeCurrent();
       c.set("fs.scheme.class", "dfs");
       Hive newdb = new Hive(c);
-      if (db != null && db.getCurrentDatabase() != null){
-        newdb.setCurrentDatabase(db.getCurrentDatabase());
-      }
       hiveDB.set(newdb);
       return newdb;
     }
@@ -574,7 +571,7 @@ public void createTable(Table tbl) throws HiveException {
   public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
     try {
       if (tbl.getDbName() == null || "".equals(tbl.getDbName().trim())) {
-        tbl.setDbName(getCurrentDatabase());
+        tbl.setDbName(SessionState.get().getCurrentDatabase());
       }
       if (tbl.getCols().size() == 0) {
         tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(),
@@ -650,7 +647,7 @@ public void createIndex(String tableName, String indexName, String indexHandlerClass,
       throws HiveException {
     try {
-      String dbName = getCurrentDatabase();
+      String dbName = SessionState.get().getCurrentDatabase();
       Index old_index = null;
       try {
         old_index = getIndex(dbName, tableName, indexName);
@@ -794,7 +791,8 @@ public Index getIndex(String qualifiedIndexName) throws HiveException {
     case 3:
       return getIndex(names[0], names[1], names[2]);
     case 2:
-      return getIndex(getCurrentDatabase(), names[0], names[1]);
+      return getIndex(SessionState.get().getCurrentDatabase(),
+          names[0], names[1]);
     default:
       throw new HiveException("Invalid index name:" + qualifiedIndexName);
     }
@@ -1000,7 +998,7 @@ public Table getTable(final String dbName, final String tableName,
    * @throws HiveException
    */
   public List<String> getAllTables() throws HiveException {
-    return getAllTables(getCurrentDatabase());
+    return getAllTables(SessionState.get().getCurrentDatabase());
   }
 
   /**
@@ -1023,7 +1021,8 @@ public Table getTable(final String dbName, final String tableName,
    * @throws HiveException
    */
   public List<String> getTablesByPattern(String tablePattern) throws HiveException {
-    return getTablesByPattern(getCurrentDatabase(), tablePattern);
+    return getTablesByPattern(SessionState.get().getCurrentDatabase(),
+        tablePattern);
   }
 
   /**
@@ -1146,6 +1145,16 @@ public Database getDatabase(String dbName) throws HiveException {
   }
 
   /**
+   * Get the Database object for the current database.
+   * @return a Database object if this database exists, null otherwise.
+   * @throws HiveException
+   */
+  public Database getDatabaseCurrent() throws HiveException {
+    String currentDb = SessionState.get().getCurrentDatabase();
+    return getDatabase(currentDb);
+  }
+
+  /**
    * Load a directory into a Hive Table Partition - Alters existing content of
    * the partition with the contents of loadPath. - If the partition does not
   * exist - one is created - files in loadPath are moved into Hive. But the
@@ -1908,25 +1917,6 @@ public void validatePartitionNameCharacters(List<String> partVals) throws HiveException {
     }
   }
 
-  /**
-   * Get the name of the current database
-   * @return the current database name
-   */
-  public String getCurrentDatabase() {
-    if (null == currentDatabase) {
-      currentDatabase = DEFAULT_DATABASE_NAME;
-    }
-    return currentDatabase;
-  }
-
-  /**
-   * Set the name of the current database
-   * @param currentDatabase
-   */
-  public void setCurrentDatabase(String currentDatabase) {
-    this.currentDatabase = currentDatabase;
-  }
-
   public void createRole(String roleName, String ownerName)
       throws HiveException {
     try {
@@ -2500,7 +2490,7 @@ public Table newTable(String tableName) throws HiveException {
     case 2:
       return new Table(names[0], names[1]);
     case 1:
-      return new Table(getCurrentDatabase(), names[0]);
+      return new Table(SessionState.get().getCurrentDatabase(), names[0]);
     default:
       try{
         throw new HiveException("Invalid table name: " + tableName);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 932ec66..37e01de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1120,7 +1120,7 @@ private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException {
     AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP);
     alterIdxDesc.setIndexName(indexName);
     alterIdxDesc.setBaseTableName(baseTableName);
-    alterIdxDesc.setDbName(db.getCurrentDatabase());
+    alterIdxDesc.setDbName(SessionState.get().getCurrentDatabase());
     alterIdxDesc.setSpec(partSpec);
 
     Task<?> tsTask = TaskFactory.get(new DDLWork(alterIdxDesc), conf);
@@ -1142,7 +1142,7 @@ private void analyzeAlterIndexProps(ASTNode ast)
     alterIdxDesc.setProps(mapProp);
     alterIdxDesc.setIndexName(indexName);
     alterIdxDesc.setBaseTableName(baseTableName);
-    alterIdxDesc.setDbName(db.getCurrentDatabase());
+    alterIdxDesc.setDbName(SessionState.get().getCurrentDatabase());
 
     rootTasks.add(TaskFactory.get(new DDLWork(alterIdxDesc), conf));
   }
@@ -1150,7 +1150,7 @@
   private List<Task<?>> getIndexBuilderMapRed(String baseTableName, String indexName,
       HashMap<String, String> partSpec) throws SemanticException {
     try {
-      String dbName = db.getCurrentDatabase();
+      String dbName = SessionState.get().getCurrentDatabase();
       Index index = db.getIndex(dbName, baseTableName, indexName);
       Table indexTbl = getTable(index.getIndexTableName());
       String baseTblName = index.getOrigTableName();
@@ -2056,7 +2056,7 @@ private void analyzeShowDatabases(ASTNode ast) throws SemanticException {
 
   private void analyzeShowTables(ASTNode ast) throws SemanticException {
     ShowTablesDesc showTblsDesc;
-    String dbName = db.getCurrentDatabase();
+    String dbName = SessionState.get().getCurrentDatabase();
     String tableNames = null;
 
     if (ast.getChildCount() > 3) {
@@ -2119,7 +2119,7 @@ private void analyzeShowColumns(ASTNode ast) throws SemanticException {
   private void analyzeShowTableStatus(ASTNode ast) throws SemanticException {
     ShowTableStatusDesc showTblStatusDesc;
     String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
-    String dbName = db.getCurrentDatabase();
+    String dbName = SessionState.get().getCurrentDatabase();
     int children = ast.getChildCount();
     HashMap<String, String> partSpec = null;
     if (children >= 2) {
@@ -2152,7 +2152,7 @@
   private void analyzeShowTableProperties(ASTNode ast) throws SemanticException {
     ShowTblPropertiesDesc showTblPropertiesDesc;
     String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
-    String dbName = db.getCurrentDatabase();
+    String dbName = SessionState.get().getCurrentDatabase();
     String propertyName = null;
     if (ast.getChildCount() > 1) {
       propertyName = unescapeSQLString(ast.getChild(1).getText());
@@ -2414,7 +2414,7 @@ private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
     partSpecs.add(newPartSpec);
     addTablePartsOutputs(tblName, partSpecs);
     RenamePartitionDesc renamePartitionDesc = new RenamePartitionDesc(
-        db.getCurrentDatabase(), tblName, oldPartSpec, newPartSpec);
+        SessionState.get().getCurrentDatabase(), tblName, oldPartSpec, newPartSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         renamePartitionDesc), conf));
   }
@@ -2504,7 +2504,7 @@ private void analyzeAlterTableAlterParts(ASTNode ast)
     // check if table exists.
     try {
-      tab = db.getTable(db.getCurrentDatabase(), tblName, true);
+      tab = db.getTable(SessionState.get().getCurrentDatabase(), tblName, true);
       inputs.add(new ReadEntity(tab));
     } catch (HiveException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
@@ -2542,7 +2542,7 @@
     }
 
     AlterTableAlterPartDesc alterTblAlterPartDesc =
-        new AlterTableAlterPartDesc(db.getCurrentDatabase(), tblName, newCol);
+        new AlterTableAlterPartDesc(SessionState.get().getCurrentDatabase(), tblName, newCol);
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblAlterPartDesc), conf));
@@ -2592,7 +2592,7 @@ private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
       if (currentPart != null) {
         validatePartitionValues(currentPart);
         AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-            db.getCurrentDatabase(), tblName, currentPart,
+            SessionState.get().getCurrentDatabase(), tblName, currentPart,
            currentLocation, ifNotExists, expectView);
         partitionDescs.add(addPartitionDesc);
       }
@@ -2613,7 +2613,7 @@
       if (currentPart != null) {
         validatePartitionValues(currentPart);
         AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-            db.getCurrentDatabase(), tblName, currentPart,
+            SessionState.get().getCurrentDatabase(), tblName, currentPart,
            currentLocation, ifNotExists, expectView);
         partitionDescs.add(addPartitionDesc);
       }
@@ -2698,7 +2698,7 @@ private void analyzeAlterTableTouch(CommonTree ast)
 
     if (partSpecs.size() == 0) {
       AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
-          db.getCurrentDatabase(), tblName, null,
+          SessionState.get().getCurrentDatabase(), tblName, null,
           AlterTableDesc.AlterTableTypes.TOUCH);
       outputs.add(new WriteEntity(tab));
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -2707,7 +2707,7 @@
       addTablePartsOutputs(tblName, partSpecs);
       for (Map<String, String> partSpec : partSpecs) {
         AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
-            db.getCurrentDatabase(), tblName, partSpec,
+            SessionState.get().getCurrentDatabase(), tblName, partSpec,
             AlterTableDesc.AlterTableTypes.TOUCH);
         rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
             touchDesc), conf));
@@ -2747,7 +2747,7 @@ private void analyzeAlterTableArchive(CommonTree ast, boolean isUnArchive)
       throw new SemanticException(e.getMessage(), e);
     }
     AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
-        db.getCurrentDatabase(), tblName, partSpec,
+        SessionState.get().getCurrentDatabase(), tblName, partSpec,
         (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         archiveDesc), conf));
@@ -3240,7 +3240,7 @@ private Table getTable(String tblName) throws SemanticException {
   }
 
   private Table getTable(String tblName, boolean throwException) throws SemanticException {
-    return getTable(db.getCurrentDatabase(), tblName, throwException);
+    return getTable(SessionState.get().getCurrentDatabase(), tblName, throwException);
   }
 
   private Table getTable(String database, String tblName, boolean throwException)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 3413f51..e97d948 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -56,6 +56,7 @@
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 
 /**
@@ -94,7 +95,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       Path metadataPath = new Path(fromPath, METADATA_NAME);
       Map.Entry<org.apache.hadoop.hive.metastore.api.Table, List<Partition>> rv =
           EximUtil.readMetaData(fs, metadataPath);
-      dbname = db.getCurrentDatabase();
+      dbname = SessionState.get().getCurrentDatabase();
       org.apache.hadoop.hive.metastore.api.Table table = rv.getKey();
       tblDesc = new CreateTableDesc(
           table.getTableName(),
@@ -241,8 +242,9 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       Task<?> t = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), tblDesc), conf);
       Table table = new Table(dbname, tblDesc.getTableName());
+      String currentDb = SessionState.get().getCurrentDatabase();
       conf.set("import.destination.dir",
-          wh.getTablePath(db.getDatabase(db.getCurrentDatabase()),
+          wh.getTablePath(db.getDatabaseCurrent(),
               tblDesc.getTableName()).toString());
       if ((tblDesc.getPartCols() != null) && (tblDesc.getPartCols().size() != 0)) {
         for (AddPartitionDesc addPartitionDesc : partitionDescs) {
@@ -260,7 +262,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
           if (tblDesc.getLocation() != null) {
             tablePath = new Path(tblDesc.getLocation());
           } else {
-            tablePath = wh.getTablePath(db.getDatabase(db.getCurrentDatabase()), tblDesc.getTableName());
+            tablePath = wh.getTablePath(db.getDatabaseCurrent(), tblDesc.getTableName());
           }
           checkTargetLocationEmpty(fs, tablePath);
           t.addDependentTask(loadTable(fromURI, table));
@@ -312,7 +314,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
                 Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
           } else {
             tgtPath = new Path(wh.getTablePath(
-                db.getDatabase(db.getCurrentDatabase()), tblDesc.getTableName()),
+                db.getDatabaseCurrent(), tblDesc.getTableName()),
                 Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
           }
         } else {
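
Usage sketch (not part of the patch, names assumed): the new Hive.getDatabaseCurrent() collapses the repeated db.getDatabase(db.getCurrentDatabase()) idiom seen above. Assuming a Warehouse instance wh and a started SessionState:

    // Resolve the warehouse path of a table in the session's current database.
    Hive db = Hive.get(conf);
    Path tablePath = wh.getTablePath(db.getDatabaseCurrent(), "some_table");
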
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 6d50109..22aef95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -603,7 +603,7 @@ private String processTable(QB qb, ASTNode tabref) throws SemanticException {
     qb.getParseInfo().setSrcForAlias(alias, tableTree);
 
-    unparseTranslator.addTableNameTranslation(tableTree, db.getCurrentDatabase());
+    unparseTranslator.addTableNameTranslation(tableTree, SessionState.get().getCurrentDatabase());
     if (aliasIndex != 0) {
       unparseTranslator.addIdentifierTranslation((ASTNode) tabref
           .getChild(aliasIndex));
@@ -802,7 +802,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1)
       break;
 
     case HiveParser.TOK_INSERT_INTO:
-      String currentDatabase = db.getCurrentDatabase();
+      String currentDatabase = SessionState.get().getCurrentDatabase();
       String tab_name = getUnescapedName((ASTNode) ast.getChild(0).getChild(0),
           currentDatabase);
       qbp.addInsertIntoTable(tab_name);
@@ -8327,7 +8327,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       }
       viewSelect = child;
       // prevent view from referencing itself
-      viewsExpanded.add(db.getCurrentDatabase() + "." + createVwDesc.getViewName());
+      viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
     }
 
     // continue analyzing from the child ASTNode.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 6888504..e8d50f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.session;
 
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
 import java.io.File;
 import java.io.IOException;
@@ -133,6 +134,8 @@
 
   private Map<String, List<String>> localMapRedErrors;
 
+  private String currentDatabase;
+
   /**
    * Lineage state.
    */
@@ -767,6 +770,17 @@ public void setLocalMapRedErrors(Map<String, List<String>> localMapRedErrors) {
     this.localMapRedErrors = localMapRedErrors;
   }
 
+  public String getCurrentDatabase() {
+    if (currentDatabase == null) {
+      currentDatabase = DEFAULT_DATABASE_NAME;
+    }
+    return currentDatabase;
+  }
+
+  public void setCurrentDatabase(String currentDatabase) {
+    this.currentDatabase = currentDatabase;
+  }
+
   public void close() throws IOException {
     File resourceDir =
         new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
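
Contract sketch for the new SessionState accessors above (not part of the patch): the getter lazily falls back to MetaStoreUtils.DEFAULT_DATABASE_NAME, and the setter only records the name; it does not validate the database against the metastore (DDLTask.switchDatabase checks databaseExists before calling it).

    SessionState.start(new HiveConf());
    assert "default".equals(SessionState.get().getCurrentDatabase());
    SessionState.get().setCurrentDatabase("sales"); // no metastore check here
    assert "sales".equals(SessionState.get().getCurrentDatabase());
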
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
index d318f71..bc2bfce 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -470,7 +470,7 @@ public void clearTestSideEffects () throws Exception {
     // Delete any tables other than the source tables
     // and any databases other than the default database.
     for (String dbName : db.getAllDatabases()) {
-      db.setCurrentDatabase(dbName);
+      SessionState.get().setCurrentDatabase(dbName);
       for (String tblName : db.getAllTables()) {
         if (!DEFAULT_DATABASE_NAME.equals(dbName) || !srcTables.contains(tblName)) {
           Table tblObj = db.getTable(tblName);
@@ -494,7 +494,7 @@ public void clearTestSideEffects () throws Exception {
         db.dropDatabase(dbName);
       }
     }
-    Hive.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
+    SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
 
     List<String> roleNames = db.getAllRoleNames();
     for (String roleName : roleNames) {
@@ -617,7 +617,8 @@ public void createSources() throws Exception {
     db.createTable("src_sequencefile", cols, null,
         SequenceFileInputFormat.class, SequenceFileOutputFormat.class);
 
-    Table srcThrift = new Table(db.getCurrentDatabase(), "src_thrift");
+    Table srcThrift =
+        new Table(SessionState.get().getCurrentDatabase(), "src_thrift");
     srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
     srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
     srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 9588061..68c319c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -55,6 +55,7 @@
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.ScriptDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -78,7 +79,8 @@
   static {
     try {
       conf = new HiveConf(ExecDriver.class);
-
+      SessionState.start(conf);
+
       fs = FileSystem.get(conf);
       if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
         throw new RuntimeException(tmpdir + " exists but is not a directory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 8b8c276..8beef09 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -64,6 +64,7 @@ protected void setUp() {
     try {
       conf = new HiveConf(HiveHistory.class);
+      SessionState.start(conf);
 
       fs = FileSystem.get(conf);
       if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 4ee5267..a8f548d 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -39,6 +39,7 @@
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
@@ -61,6 +62,7 @@
   protected void setUp() throws Exception {
     super.setUp();
     hiveConf = new HiveConf(this.getClass());
+    SessionState.start(hiveConf);
    try {
      hm = Hive.get(hiveConf);
    } catch (Exception e) {
@@ -487,13 +489,23 @@ public void testIndex() throws Throwable {
     }
   }
 
-  public void testHiveRefreshDatabase() throws Throwable{
-    String testDatabaseName = "test_database";
-    Database testDatabase = new Database();
-    testDatabase.setName(testDatabaseName);
-    hm.createDatabase(testDatabase, true);
-    hm.setCurrentDatabase(testDatabaseName);
-    hm = Hive.get(hiveConf, true); //refresh Hive instance
-    assertEquals(testDatabaseName, hm.getCurrentDatabase());
+  public void testHiveRefreshOnConfChange() throws Throwable{
+    Hive prevHiveObj = Hive.get();
+    Hive newHiveObj;
+
+    //if HiveConf has not changed, same object should be returned
+    HiveConf newHconf = new HiveConf(hiveConf);
+    newHiveObj = Hive.get(newHconf);
+    assertTrue(prevHiveObj == newHiveObj);
+
+    //if needs refresh param is passed, it should return new object
+    newHiveObj = Hive.get(newHconf, true);
+    assertTrue(prevHiveObj != newHiveObj);
+
+    //if HiveConf has changed, new object should be returned
+    prevHiveObj = Hive.get(); // get current thread local object
+    newHconf.set("dummykey", "dummyvalue");
+    newHiveObj = Hive.get(newHconf);
+    assertTrue(prevHiveObj != newHiveObj);
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index 81a3396..91de8da 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro;
 import org.junit.Before;
 import org.junit.Test;
@@ -44,6 +45,7 @@ public void setup() throws Exception {
     context = new Context(conf);
     parseDriver = new ParseDriver();
     analyzer = new MacroSemanticAnalyzer(conf);
+    SessionState.start(conf);
   }
 
   private ASTNode parse(String command) throws Exception {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
new file mode 100644
index 0000000..a808a6c
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.session;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test SessionState
+ */
+public class TestSessionState {
+
+  @Before
+  public void setup() {
+    SessionState.start(new HiveConf());
+  }
+
+  /**
+   * test set and get db
+   */
+  @Test
+  public void testgetDbName() throws Exception {
+    //check that we start with default db
+    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+        SessionState.get().getCurrentDatabase());
+    final String newdb = "DB_2";
+
+    //set new db and verify get
+    SessionState.get().setCurrentDatabase(newdb);
+    assertEquals(newdb,
+        SessionState.get().getCurrentDatabase());
+
+    //verify that a new SessionState has the default db
+    SessionState.start(new HiveConf());
+    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+        SessionState.get().getCurrentDatabase());
+  }
+}
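
Finally, a sketch of the thread-local caching contract that testHiveRefreshOnConfChange exercises (not part of the patch; assumes a started SessionState and a method declaring throws HiveException):

    HiveConf conf = new HiveConf();
    Hive h1 = Hive.get(conf);               // creates and caches the thread-local
    Hive h2 = Hive.get(new HiveConf(conf)); // unchanged conf: same instance as h1
    Hive h3 = Hive.get(conf, true);         // forced refresh: new instance
    HiveConf changed = new HiveConf(conf);
    changed.set("dummykey", "dummyvalue");
    Hive h4 = Hive.get(changed);            // changed conf: new instance
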