commit c8fac5970781d87e04a51777d240048f0db07a7d Author: Alan Gates Date: Fri Jun 1 17:26:13 2018 -0700 HIVE-19769 Create dedicated objects for DB and Table names diff --git itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java index ff97522e63..aa66c8439a 100644 --- itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java +++ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java @@ -18,6 +18,7 @@ package org.apache.hive.hcatalog.listener; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; import org.apache.hadoop.hive.metastore.api.Catalog; @@ -90,7 +91,6 @@ import org.apache.hadoop.hive.metastore.api.WMNullablePool; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.thrift.TException; /** @@ -1226,13 +1226,13 @@ public int deleteRuntimeStats(int maxRetainSecs) throws MetaException { @Override - public List getTableNamesWithStats() throws MetaException, + public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { return null; } @Override - public List getAllTableNamesForStats() throws MetaException, + public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 83490d2d53..a081bef767 100644 --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -61,6 +61,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.cache.CachedStore; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.MapRedStats; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.Registry; @@ -229,6 +230,8 @@ private String currentDatabase; + private String currentCatalog; + private final String CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER = "hive.internal.ss.authz.settings.applied.marker"; @@ -1725,6 +1728,17 @@ public void setCurrentDatabase(String currentDatabase) { this.currentDatabase = currentDatabase; } + public String getCurrentCatalog() { + if (currentCatalog == null) { + currentCatalog = MetaStoreUtils.getDefaultCatalog(getConf()); + } + return currentCatalog; + } + + public void setCurrentCatalog(String currentCatalog) { + this.currentCatalog = currentCatalog; + } + public void close() throws IOException { for (Closeable cleanupItem : cleanupItems) { try { diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java index 285db31fd4..ddca70497a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUpdaterThread.java @@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreThread; @@ -52,7 +53,6 @@ import org.apache.hadoop.hive.metastore.conf.MetastoreConf.StatsUpdateMode; import org.apache.hadoop.hive.metastore.txn.TxnStore; import 
org.apache.hadoop.hive.metastore.txn.TxnUtils; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.hadoop.hive.ql.DriverUtils; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.session.SessionState; @@ -75,7 +75,7 @@ private RawStore rs; private TxnStore txnHandler; /** Full tables, and partitions that currently have analyze commands queued or in progress. */ - private ConcurrentHashMap tablesInProgress = new ConcurrentHashMap<>(); + private ConcurrentHashMap tablesInProgress = new ConcurrentHashMap<>(); private ConcurrentHashMap partsInProgress = new ConcurrentHashMap<>(); private AtomicInteger itemsInProgress = new AtomicInteger(0); @@ -170,7 +170,7 @@ void startWorkers() { @VisibleForTesting boolean runOneIteration() { - List fullTableNames; + List fullTableNames; try { fullTableNames = getTablesToCheck(); } catch (Throwable t) { @@ -180,7 +180,7 @@ boolean runOneIteration() { } LOG.debug("Processing {}", fullTableNames); boolean hadUpdates = false; - for (FullTableName fullTableName : fullTableNames) { + for (TableName fullTableName : fullTableNames) { try { List commands = processOneTable(fullTableName); hadUpdates = hadUpdates || commands != null; @@ -203,10 +203,10 @@ private void stopWorkers() { } } - private List processOneTable(FullTableName fullTableName) + private List processOneTable(TableName fullTableName) throws MetaException, NoSuchTxnException, NoSuchObjectException { if (isAnalyzeTableInProgress(fullTableName)) return null; - String cat = fullTableName.catalog, db = fullTableName.db, tbl = fullTableName.table; + String cat = fullTableName.getCat(), db = fullTableName.getDb(), tbl = fullTableName.getTable(); Table table = rs.getTable(cat, db, tbl); LOG.debug("Processing table {}", table); @@ -262,7 +262,7 @@ private void stopWorkers() { } } - private List findPartitionsToAnalyze(FullTableName fullTableName, String cat, String db, + private List 
findPartitionsToAnalyze(TableName fullTableName, String cat, String db, String tbl, List allCols, Map> partsToAnalyze) throws MetaException, NoSuchObjectException { // TODO: ideally when col-stats-accurate stuff is stored in some sane structure, this should @@ -404,7 +404,7 @@ private String buildPartColStr(Table table) { return partColStr; } - private List getExistingNonPartTableStatsToUpdate(FullTableName fullTableName, + private List getExistingNonPartTableStatsToUpdate(TableName fullTableName, String cat, String db, String tbl, Map params, List allCols) throws MetaException { ColumnStatistics existingStats = null; @@ -445,7 +445,7 @@ private String buildPartColStr(Table table) { return colsToUpdate; } - private List getTablesToCheck() throws MetaException, NoSuchObjectException { + private List getTablesToCheck() throws MetaException, NoSuchObjectException { if (isExistingOnly) { try { return rs.getTableNamesWithStats(); @@ -457,7 +457,7 @@ private String buildPartColStr(Table table) { } private ValidReaderWriteIdList getWriteIds( - FullTableName fullTableName) throws NoSuchTxnException, MetaException { + TableName fullTableName) throws NoSuchTxnException, MetaException { GetValidWriteIdsRequest req = new GetValidWriteIdsRequest(); req.setFullTableNames(Lists.newArrayList(fullTableName.toString())); return TxnUtils.createValidReaderWriteIdList( @@ -504,24 +504,24 @@ private void markAnalyzeDone(AnalyzeWork req) { } } - private boolean isAnalyzeTableInProgress(FullTableName fullTableName) { + private boolean isAnalyzeTableInProgress(TableName fullTableName) { return tablesInProgress.containsKey(fullTableName); } - private boolean isAnalyzePartInProgress(FullTableName tableName, String partName) { + private boolean isAnalyzePartInProgress(TableName tableName, String partName) { return partsInProgress.containsKey(makeFullPartName(tableName, partName)); } - private static String makeFullPartName(FullTableName tableName, String partName) { + private static String 
makeFullPartName(TableName tableName, String partName) { return tableName + "/" + partName; } private final static class AnalyzeWork { - FullTableName tableName; + TableName tableName; String partName, allParts; List cols; - public AnalyzeWork(FullTableName tableName, String partName, String allParts, List cols) { + public AnalyzeWork(TableName tableName, String partName, String allParts, List cols) { this.tableName = tableName; this.partName = partName; this.allParts = allParts; @@ -534,7 +534,7 @@ public String makeFullPartName() { public String buildCommand() { // Catalogs cannot be parsed as part of the query. Seems to be a bug. - String cmd = "analyze table " + tableName.db + "." + tableName.table; + String cmd = "analyze table " + tableName.getDb() + "." + tableName.getTable(); assert partName == null || allParts == null; if (partName != null) { cmd += " partition(" + partName + ")"; diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java index ed53c90eb6..33999d03dc 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java @@ -21,6 +21,7 @@ import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent; @@ -163,7 +164,7 @@ public void alterTable(RawStore msdb, Warehouse wh, String catName, String dbnam oldt = msdb.getTable(catName, dbname, name); if (oldt == null) { throw new InvalidOperationException("table " + - Warehouse.getCatalogQualifiedTableName(catName, dbname, name) + " doesn't exist"); + TableName.getQualified(catName, 
dbname, name) + " doesn't exist"); } if (oldt.getPartitionKeysSize() != 0) { @@ -238,7 +239,7 @@ public void alterTable(RawStore msdb, Warehouse wh, String catName, String dbnam try { if (destFs.exists(destPath)) { throw new InvalidOperationException("New location for this table " + - Warehouse.getCatalogQualifiedTableName(catName, newDbName, newTblName) + + TableName.getQualified(catName, newDbName, newTblName) + " already exists : " + destPath); } // check that src exists and also checks permissions necessary, rename src to dest @@ -391,8 +392,7 @@ public void alterTable(RawStore msdb, Warehouse wh, String catName, String dbnam + " Check metastore logs for detailed stack." + e.getMessage()); } finally { if (!success) { - LOG.error("Failed to alter table " + - Warehouse.getCatalogQualifiedTableName(catName, dbname, name)); + LOG.error("Failed to alter table " + TableName.getQualified(catName, dbname, name)); msdb.rollbackTransaction(); if (dataWasMoved) { try { diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index b9f5fb874d..8d7b0c394d 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -78,6 +78,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.events.AddForeignKeyEvent; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; @@ -913,7 +914,7 @@ private String startFunction(String function) { private void startTableFunction(String function, String catName, String db, String tbl) { startFunction(function, " : tbl=" + - getCatalogQualifiedTableName(catName, db, tbl)); + 
TableName.getQualified(catName, db, tbl)); } private void startMultiTableFunction(String function, String db, List tbls) { @@ -924,13 +925,13 @@ private void startMultiTableFunction(String function, String db, List tb private void startPartitionFunction(String function, String cat, String db, String tbl, List partVals) { startFunction(function, " : tbl=" + - getCatalogQualifiedTableName(cat, db, tbl) + "[" + join(partVals, ",") + "]"); + TableName.getQualified(cat, db, tbl) + "[" + join(partVals, ",") + "]"); } private void startPartitionFunction(String function, String catName, String db, String tbl, Map partName) { startFunction(function, " : tbl=" + - getCatalogQualifiedTableName(catName, db, tbl) + "partition=" + partName); + TableName.getQualified(catName, db, tbl) + "partition=" + partName); } private void endFunction(String function, boolean successful, Exception e) { @@ -2429,7 +2430,7 @@ private boolean drop_table_core(final RawStore ms, final String catName, final S ms.dropConstraint(catName, dbname, name, null, true); if (!ms.dropTable(catName, dbname, name)) { - String tableName = getCatalogQualifiedTableName(catName, dbname, name); + String tableName = TableName.getQualified(catName, dbname, name); throw new MetaException(indexName == null ? 
"Unable to drop table " + tableName: "Unable to drop index table " + tableName + " for index " + indexName); } else { @@ -2846,7 +2847,7 @@ public Table get_table_core(final String catName, final String dbname, final Str try { t = getMS().getTable(catName, dbname, name); if (t == null) { - throw new NoSuchObjectException(getCatalogQualifiedTableName(catName, dbname, name) + + throw new NoSuchObjectException(TableName.getQualified(catName, dbname, name) + " table not found"); } } catch (Exception e) { @@ -3223,7 +3224,7 @@ public boolean equals(Object obj) { tbl = ms.getTable(catName, dbName, tblName); if (tbl == null) { throw new InvalidObjectException("Unable to add partitions because " - + getCatalogQualifiedTableName(catName, dbName, tblName) + + + TableName.getQualified(catName, dbName, tblName) + " does not exist"); } @@ -3243,7 +3244,7 @@ public boolean equals(Object obj) { if (!part.getTableName().equals(tblName) || !part.getDbName().equals(dbName)) { String errorMsg = String.format( "Partition does not belong to target table %s. 
It belongs to the table %s.%s : %s", - getCatalogQualifiedTableName(catName, dbName, tblName), part.getDbName(), + TableName.getQualified(catName, dbName, tblName), part.getDbName(), part.getTableName(), part.toString()); throw new MetaException(errorMsg); } @@ -3881,14 +3882,14 @@ private boolean isRenameAllowed(String catalogName, String srcDBName, String des ms.getTable(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName); if (destinationTable == null) { throw new MetaException( "The destination table " + - getCatalogQualifiedTableName(parsedDestDbName[CAT_NAME], + TableName.getQualified(parsedDestDbName[CAT_NAME], parsedDestDbName[DB_NAME], destTableName) + " not found"); } Table sourceTable = ms.getTable(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName); if (sourceTable == null) { throw new MetaException("The source table " + - getCatalogQualifiedTableName(parsedSourceDbName[CAT_NAME], + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " not found"); } @@ -3946,7 +3947,7 @@ private boolean isRenameAllowed(String catalogName, String srcDBName, String des if (!isRenameAllowed(parsedDestDbName[CAT_NAME], parsedSourceDbName[DB_NAME], parsedDestDbName[DB_NAME])) { throw new MetaException("Exchange partition not allowed for " + - getCatalogQualifiedTableName(parsedSourceDbName[CAT_NAME], + TableName.getQualified(parsedSourceDbName[CAT_NAME], parsedSourceDbName[DB_NAME], sourceTableName) + " Dest db : " + destDbName); } try { @@ -4404,7 +4405,7 @@ private void fireReadTablePreEvent(String catName, String dbName, String tblName // metastore api call) Table t = getMS().getTable(catName, dbName, tblName); if (t == null) { - throw new NoSuchObjectException(getCatalogQualifiedTableName(catName, dbName, tblName) + throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tblName) + " table not found"); } firePreEvent(new PreReadTableEvent(t, this)); @@ -4926,7 
+4927,7 @@ public void alter_table_with_environment_context(final String dbname, private void alter_table_core(final String catName, final String dbname, final String name, final Table newTable, final EnvironmentContext envContext) throws InvalidOperationException, MetaException { - startFunction("alter_table", ": " + getCatalogQualifiedTableName(catName, dbname, name) + startFunction("alter_table", ": " + TableName.getQualified(catName, dbname, name) + " newtbl=" + newTable.getTableName()); // Update the time if it hasn't been specified. @@ -4955,7 +4956,7 @@ private void alter_table_core(final String catName, final String dbname, final S Table oldt = get_table_core(catName, dbname, name); if (!isRenameAllowed(catName, dbname, newTable.getDbName())) { throw new MetaException("Alter table not allowed for table " + - getCatalogQualifiedTableName(catName, dbname, name) + + TableName.getQualified(catName, dbname, name) + " new table = " + getCatalogQualifiedTableName(newTable)); } firePreEvent(new PreAlterTableEvent(oldt, newTable, this)); @@ -5306,7 +5307,7 @@ private Partition get_partition_by_name_core(final RawStore ms, final String cat Partition p = ms.getPartition(catName, db_name, tbl_name, partVals); if (p == null) { - throw new NoSuchObjectException(getCatalogQualifiedTableName(catName, db_name, tbl_name) + throw new NoSuchObjectException(TableName.getQualified(catName, db_name, tbl_name) + " partition (" + part_name + ") not found"); } return p; @@ -5318,7 +5319,7 @@ public Partition get_partition_by_name(final String db_name, final String tbl_na String[] parsedDbName = parseDbName(db_name, conf); startFunction("get_partition_by_name", ": tbl=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; Exception ex = null; @@ -5345,7 +5346,7 @@ public Partition 
append_partition_by_name_with_environment_context(final String throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("append_partition_by_name", ": tbl=" - + getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], + + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); Partition ret = null; @@ -5395,7 +5396,7 @@ public boolean drop_partition_by_name_with_environment_context(final String db_n final EnvironmentContext envContext) throws TException { String[] parsedDbName = parseDbName(db_name, conf); startFunction("drop_partition_by_name", ": tbl=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tbl_name) + " part=" + part_name); boolean ret = false; @@ -5535,7 +5536,7 @@ public ColumnStatistics get_table_column_statistics(String dbName, String tableN tableName = tableName.toLowerCase(); colName = colName.toLowerCase(); startFunction("get_column_statistics_by_table", ": table=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + colName); ColumnStatistics statsObj = null; try { @@ -5557,7 +5558,7 @@ public TableStatsResult get_table_statistics_req(TableStatsRequest request) thro String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_table_statistics_req", ": table=" + - getCatalogQualifiedTableName(catName, dbName, tblName)); + TableName.getQualified(catName, dbName, tblName)); TableStatsResult result = null; List lowerCaseColNames = new ArrayList<>(request.getColNames().size()); for (String colName : request.getColNames()) { @@ -5582,7 +5583,7 @@ public ColumnStatistics get_partition_column_statistics(String dbName, String ta colName = colName.toLowerCase(); 
String convertedPartName = lowerCaseConvertPartName(partName); startFunction("get_column_statistics_by_partition", ": table=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " partition=" + convertedPartName + " column=" + colName); ColumnStatistics statsObj = null; @@ -5610,7 +5611,7 @@ public PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsReques String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_partitions_statistics_req", ": table=" + - getCatalogQualifiedTableName(catName, dbName, tblName)); + TableName.getQualified(catName, dbName, tblName)); PartitionsStatsResult result = null; List lowerCaseColNames = new ArrayList<>(request.getColNames().size()); @@ -5655,7 +5656,7 @@ public boolean update_table_column_statistics(ColumnStatistics colStats) throws List statsObjs = colStats.getStatsObj(); startFunction("write_column_statistics", ": table=" + - Warehouse.getCatalogQualifiedTableName(catName, dbName, tableName)); + TableName.getQualified(catName, dbName, tableName)); for (ColumnStatisticsObj statsObj:statsObjs) { colName = statsObj.getColName().toLowerCase(); statsObj.setColName(colName); @@ -5741,7 +5742,7 @@ public boolean delete_partition_column_statistics(String dbName, String tableNam } String convertedPartName = lowerCaseConvertPartName(partName); startFunction("delete_column_statistics_by_partition",": table=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " partition=" + convertedPartName + " column=" + colName); boolean ret = false; @@ -5767,7 +5768,7 @@ public boolean delete_table_column_statistics(String dbName, String tableName, S colName = colName.toLowerCase(); } startFunction("delete_column_statistics_by_table", 
": table=" + - getCatalogQualifiedTableName(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + + TableName.getQualified(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], tableName) + " column=" + colName); boolean ret = false; @@ -7258,7 +7259,7 @@ public AggrStats get_aggr_stats_for(PartitionsStatsRequest request) throws TExce String dbName = request.getDbName().toLowerCase(); String tblName = request.getTblName().toLowerCase(); startFunction("get_aggr_stats_for", ": table=" + - getCatalogQualifiedTableName(catName, dbName, tblName)); + TableName.getQualified(catName, dbName, tblName)); List lowerCaseColNames = new ArrayList<>(request.getColNames().size()); for (String colName : request.getColNames()) { @@ -7390,7 +7391,7 @@ private Table getTable(String catName, String dbName, String tableName) throws MetaException, InvalidObjectException { Table t = getMS().getTable(catName, dbName, tableName); if (t == null) { - throw new InvalidObjectException(getCatalogQualifiedTableName(catName, dbName, tableName) + throw new InvalidObjectException(TableName.getQualified(catName, dbName, tableName) + " table not found"); } return t; diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java index 285ca3f47f..51e081b22f 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java @@ -91,7 +91,6 @@ import org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeVisitor; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.hive.common.util.BloomFilter; import 
org.datanucleus.store.rdbms.query.ForwardQueryResult; import org.slf4j.Logger; @@ -2724,7 +2723,7 @@ private void dropDanglingColumnDescriptors(List columnDescriptorIdList) TableType.MANAGED_TABLE.toString(), TableType.MATERIALIZED_VIEW.toString() }; - public List getTableNamesWithStats() throws MetaException { + public List getTableNamesWithStats() throws MetaException { // Could we also join with ACID tables to only get tables with outdated stats? String queryText0 = "SELECT DISTINCT " + TBLS + ".\"TBL_NAME\", " + DBS + ".\"NAME\", " + DBS + ".\"CTLG_NAME\" FROM " + TBLS + " INNER JOIN " + DBS + " ON " @@ -2732,7 +2731,7 @@ private void dropDanglingColumnDescriptors(List columnDescriptorIdList) String queryText1 = " WHERE " + TBLS + ".\"TBL_TYPE\" IN (" + makeParams(STATS_TABLE_TYPES.length) + ")"; - List result = new ArrayList<>(); + List result = new ArrayList<>(); String queryText = queryText0 + " INNER JOIN " + TAB_COL_STATS + " ON " + TBLS + ".\"TBL_ID\" = " + TAB_COL_STATS + ".\"TBL_ID\"" + queryText1; @@ -2786,24 +2785,24 @@ private void dropDanglingColumnDescriptors(List columnDescriptorIdList) } } - public List getAllTableNamesForStats() throws MetaException { + public List getAllTableNamesForStats() throws MetaException { String queryText = "SELECT " + TBLS + ".\"TBL_NAME\", " + DBS + ".\"NAME\", " + DBS + ".\"CTLG_NAME\" FROM " + TBLS + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\"" + " WHERE " + TBLS + ".\"TBL_TYPE\" IN (" + makeParams(STATS_TABLE_TYPES.length) + ")"; - List result = new ArrayList<>(); + List result = new ArrayList<>(); getStatsTableListResult(queryText, result); return result; } private void getStatsTableListResult( - String queryText, List result) throws MetaException { + String queryText, List result) throws MetaException { LOG.debug("Running {}", queryText); Query query = pm.newQuery("javax.jdo.query.SQL", queryText); try { List sqlResult = ensureList(executeWithArray(query, STATS_TABLE_TYPES, 
queryText)); for (Object[] line : sqlResult) { - result.add(new FullTableName( + result.add(new org.apache.hadoop.hive.common.TableName( extractSqlString(line[2]), extractSqlString(line[1]), extractSqlString(line[0]))); } } finally { diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index b3a8dd0d9d..191c535899 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.metastore; import static org.apache.commons.lang.StringUtils.join; -import static org.apache.hadoop.hive.metastore.Warehouse.getCatalogQualifiedDbName; -import static org.apache.hadoop.hive.metastore.Warehouse.getCatalogQualifiedTableName; import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier; @@ -82,7 +80,9 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.DatabaseName; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.MetaStoreDirectSql.SqlFilterForPushdown; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; @@ -213,7 +213,6 @@ import org.apache.hadoop.hive.metastore.utils.FileUtils; import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.hadoop.hive.metastore.utils.ObjectPair; import org.apache.thrift.TException; import 
org.datanucleus.AbstractNucleusContext; @@ -1415,7 +1414,7 @@ public boolean dropTable(String catName, String dbName, String tableName) deleteTableColumnStatistics(catName, dbName, tableName, null); } catch (NoSuchObjectException e) { LOG.info("Found no table level column statistics associated with {} to delete", - getCatalogQualifiedTableName(catName, dbName, tableName)); + TableName.getQualified(catName, dbName, tableName)); } List tabConstraints = listAllTableConstraintsWithOptionalConstraintName( @@ -1557,17 +1556,17 @@ public Table getTable(String catName, String dbName, String tableName) throws Me } @Override - public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { - return new GetListHelper(null, null, null, true, false) { + public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { + return new GetListHelper(null, null, null, true, false) { @Override - protected List getSqlResult( - GetHelper> ctx) throws MetaException { + protected List getSqlResult( + GetHelper> ctx) throws MetaException { return directSql.getTableNamesWithStats(); } @Override - protected List getJdoResult( - GetHelper> ctx) throws MetaException { + protected List getJdoResult( + GetHelper> ctx) throws MetaException { throw new UnsupportedOperationException("UnsupportedOperationException"); // TODO: implement? 
} }.run(false); @@ -1602,20 +1601,20 @@ protected String describeResult() { } @Override - public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { - return new GetListHelper(null, null, null, true, false) { + public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { + return new GetListHelper(null, null, null, true, false) { @Override - protected List getSqlResult( - GetHelper> ctx) throws MetaException { + protected List getSqlResult( + GetHelper> ctx) throws MetaException { return directSql.getAllTableNamesForStats(); } @Override - protected List getJdoResult( - GetHelper> ctx) throws MetaException { + protected List getJdoResult( + GetHelper> ctx) throws MetaException { boolean commited = false; Query query = null; - List result = new ArrayList<>(); + List result = new ArrayList<>(); openTransaction(); try { String paramStr = "", whereStr = ""; @@ -1634,7 +1633,7 @@ protected String describeResult() { query, MetaStoreDirectSql.STATS_TABLE_TYPES); pm.retrieveAll(tbls); for (MTable tbl : tbls) { - result.add(new FullTableName( + result.add(new TableName( tbl.getDatabase().getCatalogName(), tbl.getDatabase().getName(), tbl.getTableName())); } commited = commitTransaction(); @@ -1877,7 +1876,7 @@ private AttachedMTableInfo getMTable(String catName, String db, String table, "java.lang.String table, java.lang.String db, java.lang.String catname"); query.setUnique(true); LOG.debug("Executing getMTable for " + - getCatalogQualifiedTableName(catName, db, table)); + TableName.getQualified(catName, db, table)); mtbl = (MTable) query.execute(table, db, catName); pm.retrieve(mtbl); // Retrieving CD can be expensive and unnecessary, so do it only when required. 
@@ -1948,7 +1947,7 @@ private MTable getMTable(String catName, String db, String table) { String dbNameIfExists = (String) dbExistsQuery.execute(db, catName); if (org.apache.commons.lang.StringUtils.isEmpty(dbNameIfExists)) { throw new UnknownDBException("Could not find database " + - getCatalogQualifiedDbName(catName, db)); + DatabaseName.getQualified(catName, db)); } } else { for (Iterator iter = mtables.iterator(); iter.hasNext(); ) { @@ -2029,7 +2028,7 @@ private MTable convertToMTable(Table tbl) throws InvalidObjectException, } catch (NoSuchObjectException e) { LOG.error("Could not convert to MTable", e); throw new InvalidObjectException("Database " + - getCatalogQualifiedDbName(catName, tbl.getDbName()) + " doesn't exist."); + DatabaseName.getQualified(catName, tbl.getDbName()) + " doesn't exist."); } // If the table has property EXTERNAL set, update table type @@ -2973,7 +2972,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter( throws MetaException, NoSuchObjectException { LOG.info("Table: {} filter: \"{}\" cols: {}", - getCatalogQualifiedTableName(catName, dbName, tableName), filter, cols); + TableName.getQualified(catName, dbName, tableName), filter, cols); List partitionNames = null; List partitions = null; Table tbl = getTable(catName, dbName, tableName); @@ -3001,7 +3000,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter( if (partitionNames == null && partitions == null) { throw new MetaException("Cannot obtain list of partitions by filter:\"" + filter + - "\" for " + getCatalogQualifiedTableName(catName, dbName, tableName)); + "\" for " + TableName.getQualified(catName, dbName, tableName)); } if (!ascending) { @@ -3193,7 +3192,7 @@ private Collection getPartitionPsQueryResults(String catName, String dbName, Str tableName = normalizeIdentifier(tableName); Table table = getTable(catName, dbName, tableName); if (table == null) { - throw new NoSuchObjectException(getCatalogQualifiedTableName(catName, dbName, tableName) + 
throw new NoSuchObjectException(TableName.getQualified(catName, dbName, tableName) + " table not found"); } String partNameMatcher = MetaStoreUtils.makePartNameMatcher(table, part_vals); @@ -3913,7 +3912,7 @@ private MTable ensureGetMTable(String catName, String dbName, String tblName) MTable mtable = getMTable(catName, dbName, tblName); if (mtable == null) { throw new NoSuchObjectException("Specified catalog.database.table does not exist : " - + getCatalogQualifiedTableName(catName, dbName, tblName)); + + TableName.getQualified(catName, dbName, tblName)); } return mtable; } @@ -4721,7 +4720,7 @@ private static String generateColNameTypeSignature(String colName, String colTyp if (getPrimaryKeyConstraintName(parentTable.getDatabase().getCatalogName(), parentTable.getDatabase().getName(), parentTable.getTableName()) != null) { throw new MetaException(" Primary key already exists for: " + - getCatalogQualifiedTableName(catName, tableDB, tableName)); + TableName.getQualified(catName, tableDB, tableName)); } if (pks.get(i).getPk_name() == null) { if (pks.get(i).getKey_seq() == 1) { @@ -8182,7 +8181,7 @@ private void writeMTableColumnStatistics(Table table, MTableColumnStatistics mSt try { LOG.info("Updating table level column statistics for table={}" + - " colName={}", getCatalogQualifiedTableName(table), colName); + " colName={}", Warehouse.getCatalogQualifiedTableName(table), colName); validateTableCols(table, Lists.newArrayList(colName)); if (oldStats != null) { @@ -8210,7 +8209,7 @@ private void writeMPartitionColumnStatistics(Table table, Partition partition, String colName = mStatsObj.getColName(); LOG.info("Updating partition level column statistics for table=" + - getCatalogQualifiedTableName(catName, dbName, tableName) + + TableName.getQualified(catName, dbName, tableName) + " partName=" + partName + " colName=" + colName); boolean foundCol = false; @@ -8748,7 +8747,7 @@ public boolean deletePartitionColumnStatistics(String catName, String dbName, St 
pm.deletePersistent(mStatsObj); } else { throw new NoSuchObjectException("Column stats doesn't exist for table=" - + getCatalogQualifiedTableName(catName, dbName, tableName) + + + TableName.getQualified(catName, dbName, tableName) + " partition=" + partName + " col=" + colName); } } else { @@ -8762,7 +8761,7 @@ public boolean deletePartitionColumnStatistics(String catName, String dbName, St pm.deletePersistentAll(mStatsObjColl); } else { throw new NoSuchObjectException("Column stats don't exist for table=" - + getCatalogQualifiedTableName(catName, dbName, tableName) + " partition" + partName); + + TableName.getQualified(catName, dbName, tableName) + " partition" + partName); } } ret = commitTransaction(); @@ -8793,7 +8792,7 @@ public boolean deleteTableColumnStatistics(String catName, String dbName, String List mStatsObjColl; if (mTable == null) { throw new NoSuchObjectException("Table " + - getCatalogQualifiedTableName(catName, dbName, tableName) + TableName.getQualified(catName, dbName, tableName) + " for which stats deletion is requested doesn't exist"); } query = pm.newQuery(MTableColumnStatistics.class); diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java index f350aa9fd7..b2d4283974 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.CreationMetadata; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; @@ -88,7 +89,6 @@ import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import 
org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.thrift.TException; public interface RawStore extends Configurable { @@ -1640,9 +1640,9 @@ void alterSchemaVersion(SchemaVersionDescriptor version, SchemaVersion newVersio /** Removes outdated statistics. */ int deleteRuntimeStats(int maxRetainSecs) throws MetaException; - List getTableNamesWithStats() throws MetaException, NoSuchObjectException; + List getTableNamesWithStats() throws MetaException, NoSuchObjectException; - List getAllTableNamesForStats() throws MetaException, NoSuchObjectException; + List getAllTableNamesForStats() throws MetaException, NoSuchObjectException; Map> getPartitionColsWithStats(String catName, String dbName, String tableName) throws MetaException, NoSuchObjectException; diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java index e31935ebf5..04e9bd41ec 100755 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -31,6 +31,7 @@ import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; @@ -230,12 +231,14 @@ public Path getDefaultTablePath(Database db, String tableName) MetaStoreUtils.encodeTableName(tableName.toLowerCase()))); } + @Deprecated // Use TableName public static String getQualifiedName(Table table) { - return getQualifiedName(table.getDbName(), table.getTableName()); + return TableName.getDbTable(table.getDbName(), table.getTableName()); } + @Deprecated // Use TableName 
public static String getQualifiedName(String dbName, String tableName) { - return dbName + CAT_DB_TABLE_SEPARATOR + tableName; + return TableName.getDbTable(dbName, tableName); } public static String getQualifiedName(Partition partition) { @@ -248,22 +251,7 @@ public static String getQualifiedName(Partition partition) { * @return fully qualified name. */ public static String getCatalogQualifiedTableName(Table table) { - return getCatalogQualifiedTableName(table.getCatName(), table.getDbName(), table.getTableName()); - } - - /** - * Get table name in cat.db.table format. - * @param catName catalog name - * @param dbName database name - * @param tableName table name - * @return fully qualified name. - */ - public static String getCatalogQualifiedTableName(String catName, String dbName, String tableName) { - return catName + CAT_DB_TABLE_SEPARATOR + dbName + CAT_DB_TABLE_SEPARATOR + tableName; - } - - public static String getCatalogQualifiedDbName(String catName, String dbName) { - return catName + CAT_DB_TABLE_SEPARATOR + dbName; + return TableName.getQualified(table.getCatName(), table.getDbName(), table.getTableName()); } public boolean mkdirs(Path f) throws MetaException { diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java index d9356b8d9b..2f31c68cfc 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java @@ -38,7 +38,9 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.DatabaseName; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.Deadline; import org.apache.hadoop.hive.metastore.FileMetadataHandler; import 
org.apache.hadoop.hive.metastore.ObjectStore; @@ -116,7 +118,6 @@ import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.hadoop.hive.metastore.utils.StringUtils; import org.apache.thrift.TException; import org.slf4j.Logger; @@ -244,7 +245,7 @@ static void prewarm(RawStore rawStore) { } catch (NoSuchObjectException e) { // Continue with next database LOG.warn("Failed to cache database " - + Warehouse.getCatalogQualifiedDbName(catName, dbName) + ", moving on", e); + + DatabaseName.getQualified(catName, dbName) + ", moving on", e); } } } catch (MetaException e) { @@ -263,7 +264,7 @@ static void prewarm(RawStore rawStore) { tblNames = rawStore.getAllTables(catName, dbName); } catch (MetaException e) { LOG.warn("Failed to cache tables for database " - + Warehouse.getCatalogQualifiedDbName(catName, dbName) + ", moving on"); + + DatabaseName.getQualified(catName, dbName) + ", moving on"); // Continue with next database continue; } @@ -2407,7 +2408,7 @@ public long getCacheUpdateCount() { } static boolean isNotInBlackList(String catName, String dbName, String tblName) { - String str = Warehouse.getCatalogQualifiedTableName(catName, dbName, tblName); + String str = TableName.getQualified(catName, dbName, tblName); for (Pattern pattern : blacklistPatterns) { LOG.debug("Trying to match: {} against blacklist pattern: {}", str, pattern); Matcher matcher = pattern.matcher(str); @@ -2421,7 +2422,7 @@ static boolean isNotInBlackList(String catName, String dbName, String tblName) { } private static boolean isInWhitelist(String catName, String dbName, String tblName) { - String str = Warehouse.getCatalogQualifiedTableName(catName, dbName, tblName); + String str = TableName.getQualified(catName, dbName, tblName); for (Pattern pattern : 
whitelistPatterns) { LOG.debug("Trying to match: {} against whitelist pattern: {}", str, pattern); Matcher matcher = pattern.matcher(str); @@ -2495,12 +2496,12 @@ public int deleteRuntimeStats(int maxRetainSecs) throws MetaException { } @Override - public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { + public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { return rawStore.getTableNamesWithStats(); } @Override - public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { + public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { return rawStore.getAllTableNamesForStats(); } diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java index 6ade490405..16a8c75801 100644 --- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java +++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.metastore.utils; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.WMPoolSchedulingPolicy; import com.google.common.base.Joiner; @@ -268,7 +269,7 @@ public static MetaException newMetaException(String errorMessage, Exception e) { } if (colStatsMap.size() < 1) { LOG.debug("No stats data found for: tblName= {}, partNames= {}, colNames= {}", - Warehouse.getCatalogQualifiedTableName(catName, dbName, tableName), partNames, colNames); + TableName.getQualified(catName, dbName, tableName), partNames, colNames); return new ArrayList(); } return aggrPartitionStats(colStatsMap, partNames, areAllPartsFound, @@ -1804,34 +1805,4 @@ public static String getDefaultCatalog(Configuration conf) { return catName; } - - public static class FullTableName { - public final String catalog, db, 
table; - - public FullTableName(String catalog, String db, String table) { - assert catalog != null && db != null && table != null : catalog + ", " + db + ", " + table; - this.catalog = catalog; - this.db = db; - this.table = table; - } - - @Override - public String toString() { - return catalog + MetaStoreUtils.CATALOG_DB_SEPARATOR + db + "." + table; - } - - @Override - public int hashCode() { - final int prime = 31; - return prime * (prime * (prime + catalog.hashCode()) + db.hashCode()) + table.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - FullTableName other = (FullTableName) obj; - return catalog.equals(other.catalog) && db.equals(other.db) && table.equals(other.table); - } - } } diff --git standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java index 8c3ada3082..58af0dfd71 100644 --- standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java +++ standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.CreationMetadata; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; @@ -86,7 +87,6 @@ import org.apache.hadoop.hive.metastore.api.WMPool; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.thrift.TException; /** @@ -1188,13 +1188,13 @@ public int deleteRuntimeStats(int 
maxRetainSecs) throws MetaException { } @Override - public List getTableNamesWithStats() throws MetaException, + public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { return null; } @Override - public List getAllTableNamesForStats() throws MetaException, + public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { return null; } diff --git standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java index f98e8de4c7..182343025f 100644 --- standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java +++ standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.CreationMetadata; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; @@ -85,7 +86,6 @@ import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; -import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.FullTableName; import org.apache.thrift.TException; import org.junit.Assert; @@ -1175,13 +1175,13 @@ public int deleteRuntimeStats(int maxRetainSecs) throws MetaException { } @Override - public List getTableNamesWithStats() throws MetaException, + public List getTableNamesWithStats() throws MetaException, NoSuchObjectException { return null; } @Override - public List getAllTableNamesForStats() throws MetaException, + public List getAllTableNamesForStats() throws MetaException, NoSuchObjectException { return null; } diff --git 
storage-api/src/java/org/apache/hadoop/hive/common/DatabaseName.java storage-api/src/java/org/apache/hadoop/hive/common/DatabaseName.java new file mode 100644 index 0000000000..6a030bb1b9 --- /dev/null +++ storage-api/src/java/org/apache/hadoop/hive/common/DatabaseName.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +/** + * A container for fully qualified database name, i.e. catalogname.databasename. Also contains + * utilities for string parsing. + */ +public class DatabaseName { + static final String CAT_DB_TABLE_SEPARATOR = "."; + private final String cat; + private final String db; + + /** + * + * @param cat catalog name. This cannot be null. If you don't know the value, then likely the + * right answer is to fetch it from SessionState.getCurrentCatalog() if you want to + * get the catalog being used in the current session or + * MetaStoreUtils.getDefaultCatalog() if you want to get the default catalog for + * this Hive instance. + * @param db database name. This cannot be null. + */ + public DatabaseName(String cat, String db) { + this.cat = cat; + this.db = db; + } + + /** + * Build a DatabaseName from a string of the form [catalog.]database. + * @param name name, can be "db" or "cat.db" + * @param defaultCatalog default catalog to use if catalog name is not in the name. This can + * be null if you are absolutely certain that the catalog name is + * embedded in name. If you want the default catalog to be determined by + * the session, use SessionState.getCurrentCatalog(). If you want it to + * be determined by the default for the Hive instance or you are not in a + * session, use MetaStoreUtils.getDefaultCatalog(). + * @return new DatabaseName object. 
+ */ + public static DatabaseName fromString(String name, String defaultCatalog) { + if (name.contains(CAT_DB_TABLE_SEPARATOR)) { + String[] names = name.split("\\."); + if (names.length != 2) { + throw new RuntimeException("Database name must be either <dbname> or <catname>.<dbname>"); + } + return new DatabaseName(names[0], names[1]); + } else { + assert defaultCatalog != null; + return new DatabaseName(defaultCatalog, name); + } + } + + public String getCat() { + return cat; + } + + public String getDb() { + return db; + } + + public static String getQualified(String catName, String dbName) { + return catName + CAT_DB_TABLE_SEPARATOR + dbName; + } + + @Override + public int hashCode() { + return cat.hashCode() * 31 + db.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj != null && obj instanceof DatabaseName) { + DatabaseName that = (DatabaseName)obj; + return db.equals(that.db) && cat.equals(that.cat); + } + return false; + } + + @Override + public String toString() { + return cat + CAT_DB_TABLE_SEPARATOR + db; + } +} diff --git storage-api/src/java/org/apache/hadoop/hive/common/TableName.java storage-api/src/java/org/apache/hadoop/hive/common/TableName.java new file mode 100644 index 0000000000..f5cb192561 --- /dev/null +++ storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +/** + * A container for a fully qualified table name, i.e. catalogname.databasename.tablename. Also + * includes utilities for string parsing. + */ +public class TableName { + private final String cat; + private final String db; + private final String table; + + /** + * + * @param cat catalog name. Cannot be null. If you do not know it you can get it from + * SessionState.getCurrentCatalog() if you want to use the catalog from the current + * session, or from MetaStoreUtils.getDefaultCatalog() if you do not have a session + * or want to use the default catalog for the Hive instance. + * @param db database name. Cannot be null. If you do not know it you can get it from + * SessionState.getCurrentDatabase() or use Warehouse.DEFAULT_DATABASE_NAME. + * @param table table name, cannot be null + */ + public TableName(String cat, String db, String table) { + this.cat = cat; + this.db = db; + this.table = table; + } + + /** + * Build a TableName from a string of the form [[catalog.]database.]table. + * @param name name in string form + * @param defaultCatalog default catalog to use if catalog is not in the name. If you do not + * know it you can get it from SessionState.getCurrentCatalog() if you + * want to use the catalog from the current session, or from + * MetaStoreUtils.getDefaultCatalog() if you do not have a session or + * want to use the default catalog for the Hive instance. + * @param defaultDatabase default database to use if database is not in the name. If you do + * not know it you can get it from SessionState.getCurrentDatabase() or + * use Warehouse.DEFAULT_DATABASE_NAME. 
+ * @return TableName + */ + public static TableName fromString(String name, String defaultCatalog, String defaultDatabase) { + if (name.contains(DatabaseName.CAT_DB_TABLE_SEPARATOR)) { + String names[] = name.split("\\."); + if (names.length == 2) { + return new TableName(defaultCatalog, names[0], names[1]); + } else if (names.length == 3) { + return new TableName(names[0], names[1], names[2]); + } else { + throw new RuntimeException("Table name must be either <tablename>, <dbname>.<tablename> " + + "or <catname>.<dbname>.<tablename>"); + } + + } else { + return new TableName(defaultCatalog, defaultDatabase, name); + } + } + + public String getCat() { + return cat; + } + + public String getDb() { + return db; + } + + public String getTable() { + return table; + } + + /** + * Get the name in db.table format, for use with stuff not yet converted to use the catalog. + */ + public String getDbTable() { + return db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + + } + + /** + * Get the name in db.table format, for use with stuff not yet converted to use the catalog. 
+ */ + public static String getDbTable(String dbName, String tableName) { + return dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; + + } + + public static String getQualified(String catName, String dbName, String tableName) { + return catName + DatabaseName.CAT_DB_TABLE_SEPARATOR + dbName + DatabaseName.CAT_DB_TABLE_SEPARATOR + tableName; + } + + @Override + public int hashCode() { + return (cat.hashCode() * 31 + db.hashCode()) * 31 + table.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj != null && obj instanceof TableName) { + TableName that = (TableName)obj; + return table.equals(that.table) && db.equals(that.db) && cat.equals(that.cat); + } + return false; + } + + @Override + public String toString() { + return cat + DatabaseName.CAT_DB_TABLE_SEPARATOR + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table; + } +} diff --git storage-api/src/test/org/apache/hadoop/hive/common/TestDatabaseName.java storage-api/src/test/org/apache/hadoop/hive/common/TestDatabaseName.java new file mode 100644 index 0000000000..76c3a70760 --- /dev/null +++ storage-api/src/test/org/apache/hadoop/hive/common/TestDatabaseName.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +import org.junit.Assert; +import org.junit.Test; + +public class TestDatabaseName { + + @Test + public void differentFromConf() { + String cat = "cat"; + String db = "db"; + DatabaseName dbName = new DatabaseName(cat, db); + Assert.assertEquals(cat, dbName.getCat()); + Assert.assertEquals(db, dbName.getDb()); + Assert.assertEquals("cat.db", dbName.toString()); + } + + @Test + public void fromString() { + DatabaseName dbName = DatabaseName.fromString("cat.db", null); + Assert.assertEquals("cat", dbName.getCat()); + Assert.assertEquals("db", dbName.getDb()); + dbName = DatabaseName.fromString("db", "cat"); + Assert.assertEquals("cat", dbName.getCat()); + Assert.assertEquals("db", dbName.getDb()); + } +} diff --git storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java new file mode 100644 index 0000000000..0a8cb2a82e --- /dev/null +++ storage-api/src/test/org/apache/hadoop/hive/common/TestTableName.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +import org.junit.Assert; +import org.junit.Test; + +public class TestTableName { + @Test + public void fullname() { + TableName name = new TableName("cat", "db", "t"); + Assert.assertEquals("cat", name.getCat()); + Assert.assertEquals("db", name.getDb()); + Assert.assertEquals("t", name.getTable()); + Assert.assertEquals("cat.db.t", name.toString()); + Assert.assertEquals("db.t", name.getDbTable()); + } + + @Test + public void fromString() { + TableName name = TableName.fromString("cat.db.tab", null, null); + Assert.assertEquals("cat", name.getCat()); + Assert.assertEquals("db", name.getDb()); + Assert.assertEquals("tab", name.getTable()); + + name = TableName.fromString("db.tab", "cat", null); + Assert.assertEquals("cat", name.getCat()); + Assert.assertEquals("db", name.getDb()); + Assert.assertEquals("tab", name.getTable()); + + name = TableName.fromString("tab", "cat", "db"); + Assert.assertEquals("cat", name.getCat()); + Assert.assertEquals("db", name.getDb()); + Assert.assertEquals("tab", name.getTable()); + } +}