.../apache/hadoop/hive/metastore/ObjectStore.java | 324 ++++++++++----------- 1 file changed, 158 insertions(+), 166 deletions(-) diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index e9e7d44..074853e 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -68,6 +68,8 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; + +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.hadoop.classification.InterfaceAudience; @@ -219,7 +221,7 @@ * Verify the schema only once per JVM since the db connection info is static */ private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false); - private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class); private enum TXN_STATUS { NO_STATE, OPEN, COMMITED, ROLLBACK @@ -248,11 +250,7 @@ } HOSTNAME = hostname; String user = System.getenv("USER"); - if (user == null) { - USER = "UNKNOWN"; - } else { - USER = user; - } + USER = org.apache.commons.lang.StringUtils.defaultString(user, "UNKNOWN"); } @@ -342,7 +340,7 @@ public void setConf(Configuration conf) { String partitionValidationRegex = MetastoreConf.getVar(this.conf, ConfVars.PARTITION_NAME_WHITELIST_PATTERN); - if (partitionValidationRegex != null && !partitionValidationRegex.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isNotEmpty(partitionValidationRegex)) { partitionValidationPattern = Pattern.compile(partitionValidationRegex); } else { partitionValidationPattern = null; @@ -389,13 +387,13 @@ private void initialize(Properties dsProps) { numTries--; boolean retriable = isRetriableException(e); if ((numTries > 0) && retriable){ - LOG.info("Retriable exception while instantiating ObjectStore, retrying. " - + numTries + " tries left", e); + LOG.info("Retriable exception while instantiating ObjectStore, retrying. " + + "{} tries left", numTries, e); try { Thread.sleep(retryInterval); } catch (InterruptedException ie) { // Restore the interrupted status, since we do not want to catch it. - LOG.debug("Interrupted while sleeping before retrying.",ie); + LOG.debug("Interrupted while sleeping before retrying.", ie); Thread.currentThread().interrupt(); } // If we're here, we'll proceed down the next while loop iteration. 
@@ -460,14 +458,14 @@ private void initializeHelper(Properties dsProps) { expressionProxy = createExpressionProxy(conf); if (MetastoreConf.getBoolVar(getConf(), ConfVars.TRY_DIRECT_SQL)) { String schema = prop.getProperty("javax.jdo.mapping.Schema"); - if (schema != null && schema.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isEmpty(schema)) { schema = null; } directSql = new MetaStoreDirectSql(pm, conf, schema); } } - LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + - " created in the thread with id: " + Thread.currentThread().getId()); + LOG.debug("RawStore: {}, with PersistenceManager: {}" + + " created in the thread with id: {}", this, pm, Thread.currentThread().getId()); } private DatabaseProduct determineDatabaseProduct() { @@ -525,7 +523,7 @@ private static void configureSSL(Configuration conf) { if (pair != null && pair.length == 2) { System.setProperty(pair[0].trim(), pair[1].trim()); } else { - LOG.warn("Invalid metastore property value for " + ConfVars.DBACCESS_SSL_PROPS); + LOG.warn("Invalid metastore property value for {}", ConfVars.DBACCESS_SSL_PROPS); } } } @@ -544,10 +542,11 @@ private static Properties getDataSourceProps(Configuration conf) { // has to be a separate first step because we don't set the default values in the config object. for (ConfVars var : MetastoreConf.dataNucleusAndJdoConfs) { String confVal = MetastoreConf.getAsString(conf, var); - Object prevVal = prop.setProperty(var.getVarname(), confVal); - if (LOG.isDebugEnabled() && MetastoreConf.isPrintable(var.getVarname())) { - LOG.debug("Overriding " + var.getVarname() + " value " + prevVal - + " from jpox.properties with " + confVal); + String varName = var.getVarname(); + Object prevVal = prop.setProperty(varName, confVal); + if (MetastoreConf.isPrintable(varName)) { + LOG.debug("Overriding {} value {} from jpox.properties with {}", + varName, prevVal, confVal); } } @@ -572,7 +571,7 @@ private static Properties getDataSourceProps(Configuration conf) { // Password may no longer be in the conf, use getPassword() try { String passwd = MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.PWD); - if (passwd != null && !passwd.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isNotEmpty(passwd)) { // We can get away with the use of varname here because varname == hiveName for PWD prop.setProperty(ConfVars.PWD.getVarname(), passwd); } @@ -583,7 +582,7 @@ private static Properties getDataSourceProps(Configuration conf) { if (LOG.isDebugEnabled()) { for (Entry e : prop.entrySet()) { if (MetastoreConf.isPrintable(e.getKey().toString())) { - LOG.debug(e.getKey() + " = " + e.getValue()); + LOG.debug("{} = {}", e.getKey(), e.getValue()); } } } @@ -635,7 +634,7 @@ private static synchronized PersistenceManagerFactory getPMF() { DataStoreCache dsc = pmf.getDataStoreCache(); if (dsc != null) { String objTypes = MetastoreConf.getVar(conf, ConfVars.CACHE_PINOBJTYPES); - LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"" + objTypes + "\""); + LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"{}\"", objTypes); if (objTypes != null && objTypes.length() > 0) { objTypes = objTypes.toLowerCase(); String[] typeTokens = objTypes.split(","); @@ -665,8 +664,7 @@ public PersistenceManager getPersistenceManager() { @Override public void shutdown() { if (pm != null) { - LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + - " will be shutdown"); + LOG.debug("RawStore: {}, with PersistenceManager: {} will be 
shutdown", this, pm); pm.close(); pm = null; } @@ -833,7 +831,7 @@ public Database getDatabase(String name) throws NoSuchObjectException { ex = e; } if (db == null) { - LOG.warn("Failed to get database " + name +", returning NoSuchObjectException", ex); + LOG.warn("Failed to get database {}, returning NoSuchObjectException", name, ex); throw new NoSuchObjectException(name + (ex == null ? "" : (": " + ex.getMessage()))); } return db; @@ -872,7 +870,7 @@ public Database getJDODatabase(String name) throws NoSuchObjectException { db.setParameters(convertMap(mdb.getParameters())); db.setOwnerName(mdb.getOwnerName()); String type = mdb.getOwnerType(); - db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type)); + db.setOwnerType(org.apache.commons.lang.StringUtils.isBlank(type) ? null : PrincipalType.valueOf(type)); return db; } @@ -918,7 +916,7 @@ public boolean alterDatabase(String dbName, Database db) @Override public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException { boolean success = false; - LOG.info("Dropping database " + dbname + " along with all tables"); + LOG.info("Dropping database {} along with all tables", dbname); dbname = normalizeIdentifier(dbname); QueryWrapper queryWrapper = new QueryWrapper(); try { @@ -929,7 +927,7 @@ public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaExc pm.retrieve(db); if (db != null) { List dbGrants = this.listDatabaseGrants(dbname, queryWrapper); - if (dbGrants != null && dbGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(dbGrants)) { pm.deletePersistentAll(dbGrants); } pm.deletePersistent(db); @@ -1077,7 +1075,7 @@ public boolean dropType(String typeName) { success = commitTransaction(); } catch (JDOObjectNotFoundException e) { success = commitTransaction(); - LOG.debug("type not found " + typeName, e); + LOG.debug("type not found {}", typeName, e); } finally { rollbackAndCleanup(success, query); } @@ -1185,36 +1183,36 @@ public boolean dropTable(String dbName, String tableName) throws MetaException, if (tbl != null) { // first remove all the grants List tabGrants = listAllTableGrants(dbName, tableName); - if (tabGrants != null && tabGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tabGrants)) { pm.deletePersistentAll(tabGrants); } List tblColGrants = listTableAllColumnGrants(dbName, tableName); - if (tblColGrants != null && tblColGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tblColGrants)) { pm.deletePersistentAll(tblColGrants); } List partGrants = this.listTableAllPartitionGrants(dbName, tableName); - if (partGrants != null && partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } List partColGrants = listTableAllPartitionColumnGrants(dbName, tableName); - if (partColGrants != null && partColGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColGrants)) { pm.deletePersistentAll(partColGrants); } // delete column statistics if present try { deleteTableColumnStatistics(dbName, tableName, null); } catch (NoSuchObjectException e) { - LOG.info("Found no table level column statistics associated with db " + dbName + - " table " + tableName + " record to delete"); + LOG.info("Found no table level column statistics associated with db {}" + + " table {} record to delete", dbName, tableName); } List tabConstraints = listAllTableConstraintsWithOptionalConstraintName( dbName, tableName, null); - if (tabConstraints != null && tabConstraints.size() > 0) { + if 
(CollectionUtils.isNotEmpty(tabConstraints)) { pm.deletePersistentAll(tabConstraints); } @@ -1384,7 +1382,7 @@ private int getObjectCount(String fieldName, String objName) { if (tableNames != null && !tableNames.equals("*")) { appendPatternCondition(filterBuilder, "tableName", tableNames, parameterVals); } - if (tableTypes != null && !tableTypes.isEmpty()) { + if (CollectionUtils.isNotEmpty(tableTypes)) { appendSimpleCondition(filterBuilder, "tableType", tableTypes.toArray(new String[0]), parameterVals); } @@ -1510,7 +1508,7 @@ private MTable getMTable(String db, String table) { dbExistsQuery.setUnique(true); dbExistsQuery.setResult("name"); String dbNameIfExists = (String) dbExistsQuery.execute(db); - if (dbNameIfExists == null || dbNameIfExists.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isEmpty(dbNameIfExists)) { throw new UnknownDBException("Could not find database " + db); } @@ -1852,7 +1850,7 @@ public boolean addPartitions(String dbName, String tblName, List part } } } - if (toPersist.size() > 0) { + if (CollectionUtils.isNotEmpty(toPersist)) { pm.makePersistentAll(toPersist); pm.flush(); } @@ -1975,7 +1973,7 @@ public boolean addPartition(Partition part) throws InvalidObjectException, toPersist.add(partColumn); } - if (toPersist.size() > 0) { + if (CollectionUtils.isNotEmpty(toPersist)) { pm.makePersistentAll(toPersist); } } @@ -2033,7 +2031,7 @@ private MPartition getMPartition(String dbName, String tableName, List p // We need to compare partition name with requested name since some DBs // (like MySQL, Derby) considers 'a' = 'a ' whereas others like (Postgres, // Oracle) doesn't exhibit this problem. - if (mparts != null && mparts.size() > 0) { + if (CollectionUtils.isNotEmpty(mparts)) { if (mparts.size() > 1) { throw new MetaException( "Expecting only one partition but more than one partitions are found."); @@ -2138,7 +2136,7 @@ public boolean dropPartition(String dbName, String tableName, @Override public void dropPartitions(String dbName, String tblName, List partNames) throws MetaException, NoSuchObjectException { - if (partNames.isEmpty()) return; + if (CollectionUtils.isEmpty(partNames)) return; boolean success = false; openTransaction(); try { @@ -2190,7 +2188,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio part.getTable().getTableName(), Lists.newArrayList(partName)); - if (partGrants != null && partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } @@ -2198,7 +2196,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio part.getTable().getDatabase().getName(), part.getTable().getTableName(), Lists.newArrayList(partName)); - if (partColumnGrants != null && partColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColumnGrants)) { pm.deletePersistentAll(partColumnGrants); } @@ -2263,7 +2261,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio openTransaction(); List mparts = listMPartitions(dbName, tblName, max, queryWrapper); List parts = new ArrayList<>(mparts.size()); - if (mparts != null && mparts.size()>0) { + if (CollectionUtils.isNotEmpty(mparts)) { for (MPartition mpart : mparts) { MTable mtbl = mpart.getTable(); Partition part = convertToPart(mpart); @@ -2413,16 +2411,16 @@ public PartitionValuesResponse listPartitionValues(String dbName, String tableNa dbName = dbName.toLowerCase().trim(); tableName = tableName.toLowerCase().trim(); try { - if (filter == null || filter.isEmpty()) { + if 
(org.apache.commons.lang.StringUtils.isEmpty(filter)) { PartitionValuesResponse response = getDistinctValuesForPartitionsNoTxn(dbName, tableName, cols, applyDistinct, ascending, maxParts); - LOG.info("Number of records fetched: " + response.getPartitionValues().size()); + LOG.info("Number of records fetched: {}", response.getPartitionValues().size()); return response; } else { PartitionValuesResponse response = extractPartitionNamesByFilter(dbName, tableName, filter, cols, ascending, applyDistinct, maxParts); if (response != null && response.getPartitionValues() != null) { - LOG.info("Number of records fetched with filter: " + response.getPartitionValues().size()); + LOG.info("Number of records fetched with filter: {}", response.getPartitionValues().size()); } return response; } @@ -2437,8 +2435,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str List cols, boolean ascending, boolean applyDistinct, long maxParts) throws MetaException, NoSuchObjectException { - LOG.info("Database: " + dbName + " Table:" + tableName + " filter\"" + filter + "\" cols:" + cols); - List partitionResults = new ArrayList(); + LOG.info("Database: {} Table: {} filter: \"{}\" cols: {}", dbName, tableName, filter, cols); List partitionNames = null; List partitions = null; Table tbl = getTable(dbName, tableName); @@ -2446,7 +2443,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str // Get partitions by name - ascending or descending partitionNames = getPartitionNamesByFilter(dbName, tableName, filter, ascending, maxParts); } catch (MetaException e) { - LOG.warn("Querying by partition names failed, trying out with partition objects, filter:" + filter); + LOG.warn("Querying by partition names failed, trying out with partition objects, filter: {}", filter); } if (partitionNames == null) { @@ -2475,7 +2472,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str // Return proper response PartitionValuesResponse response = new PartitionValuesResponse(); response.setPartitionValues(new ArrayList(partitionNames.size())); - LOG.info("Converting responses to Partition values for items:" + partitionNames.size()); + LOG.info("Converting responses to Partition values for items: {}", partitionNames.size()); for (String partName : partitionNames) { ArrayList vals = new ArrayList(tbl.getPartitionKeys().size()); for (FieldSchema key : tbl.getPartitionKeys()) { @@ -2520,9 +2517,10 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str query.setRange(0, maxParts); } - LOG.debug("Filter specified is " + filter + "," + - " JDOQL filter is " + queryFilterString); - LOG.debug("Parms is " + params); + LOG.debug("Filter specified is {}, JDOQL filter is {}", filter, + queryFilterString); + + LOG.debug("Parms is {}", params); String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); @@ -2541,7 +2539,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str LOG.debug("Done executing query for getPartitionNamesByFilter"); success = commitTransaction(); - LOG.debug("Done retrieving all objects for getPartitionNamesByFilter, size:" + partNames.size()); + LOG.debug("Done retrieving all objects for getPartitionNamesByFilter, size: {}", partNames.size()); query.closeAll(); } finally { if (!success) { @@ -2686,7 +2684,7 @@ private Collection getPartitionPsQueryResults(String dbName, String tableName, // User specified a row limit, set it on 
the Query query.setRange(0, max_parts); } - if (resultsCol != null && !resultsCol.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isNotEmpty(resultsCol)) { query.setResult(resultsCol); } @@ -2768,7 +2766,7 @@ private Collection getPartitionPsQueryResults(String dbName, String tableName, LOG.debug("Done executing query for listMPartitions"); pm.retrieveAll(mparts); success = commitTransaction(); - LOG.debug("Done retrieving all objects for listMPartitions " + mparts); + LOG.debug("Done retrieving all objects for listMPartitions {}", mparts); } finally { if (!success) { rollbackTransaction(); } @@ -2866,7 +2864,7 @@ private boolean getPartitionNamesPrunedByExprNoTxn(Table table, byte[] expr, String defaultPartName, short maxParts, List result) throws MetaException { result.addAll(getPartitionNamesNoTxn( table.getDbName(), table.getTableName(), maxParts)); - if (defaultPartName == null || defaultPartName.isEmpty()) { + if (org.apache.commons.lang.StringUtils.isEmpty(defaultPartName)) { defaultPartName = MetastoreConf.getVar(getConf(), ConfVars.DEFAULTPARTITIONNAME); } return expressionProxy.filterPartitionsByExpr(table.getPartitionKeys(), expr, defaultPartName, result); @@ -2940,8 +2938,8 @@ private Integer getNumPartitionsViaOrmFilter(Table table, ExpressionTree tree, b */ private List getPartitionsViaOrmFilter( String dbName, String tblName, List partNames) throws MetaException { - if (partNames.isEmpty()) { - return new ArrayList<>(); + if (CollectionUtils.isEmpty(partNames)) { + return Collections.emptyList(); } ObjectPair> queryWithParams = getPartQueryWithParams(dbName, tblName, partNames); @@ -2964,7 +2962,7 @@ private void dropPartitionsNoTxn(String dbName, String tblName, List par Query query = queryWithParams.getFirst(); query.setClass(MPartition.class); long deleted = query.deletePersistentAll(queryWithParams.getSecond()); - LOG.debug("Deleted " + deleted + " partition from store"); + LOG.debug("Deleted {} partition from store", deleted); query.closeAll(); } @@ -3003,7 +3001,7 @@ private void dropPartitionsNoTxn(String dbName, String tblName, List par int n = 0; Map params = new HashMap<>(); for (Iterator itr = partNames.iterator(); itr.hasNext();) { - String pn = "p" + n; + String pn = "p".concat(Integer.toString(n)); n++; String part = itr.next(); params.put(pn, part); @@ -3014,7 +3012,7 @@ private void dropPartitionsNoTxn(String dbName, String tblName, List par sb.append(')'); Query query = pm.newQuery(); query.setFilter(sb.toString()); - LOG.debug(" JDOQL filter is " + sb.toString()); + LOG.debug(" JDOQL filter is {}", sb); params.put("t1", normalizeIdentifier(tblName)); params.put("t2", normalizeIdentifier(dbName)); query.declareParameters(makeParameterDeclarationString(params)); @@ -3264,7 +3262,7 @@ protected String describeResult() { @Override public int getNumPartitionsByFilter(String dbName, String tblName, String filter) throws MetaException, NoSuchObjectException { - final ExpressionTree exprTree = (filter != null && !filter.isEmpty()) + final ExpressionTree exprTree = (org.apache.commons.lang.StringUtils.isNotEmpty(filter)) ?
PartFilterExprUtil.getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE; return new GetHelper(dbName, tblName, true, true) { @@ -3344,7 +3342,7 @@ protected Integer getJdoResult( protected List getPartitionsByFilterInternal(String dbName, String tblName, String filter, final short maxParts, boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException { - final ExpressionTree tree = (filter != null && !filter.isEmpty()) + final ExpressionTree tree = (org.apache.commons.lang.StringUtils.isNotEmpty(filter)) ? PartFilterExprUtil.getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE; return new GetListHelper(dbName, tblName, allowSql, allowJdo) { private final SqlFilterForPushdown filter = new SqlFilterForPushdown(); @@ -3400,7 +3398,7 @@ private Table ensureGetTable( */ private String makeQueryFilterString(String dbName, MTable mtable, String filter, Map params) throws MetaException { - ExpressionTree tree = (filter != null && !filter.isEmpty()) + ExpressionTree tree = (org.apache.commons.lang.StringUtils.isNotEmpty(filter)) ? PartFilterExprUtil.getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE; return makeQueryFilterString(dbName, convertToTable(mtable), tree, params, true); } @@ -3432,11 +3430,11 @@ private String makeQueryFilterString(String dbName, Table table, ExpressionTree tree.generateJDOFilterFragment(getConf(), table, params, queryBuilder); if (queryBuilder.hasError()) { assert !isValidatedFilter; - LOG.info("JDO filter pushdown cannot be used: " + queryBuilder.getErrorMessage()); + LOG.info("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage()); return null; } String jdoFilter = queryBuilder.getFilter(); - LOG.debug("jdoFilter = " + jdoFilter); + LOG.debug("jdoFilter = {}", jdoFilter); return jdoFilter; } @@ -3455,7 +3453,7 @@ private String makeParameterDeclarationStringObj(Map params) { for (Entry entry : params.entrySet()) { paramDecl.append(", "); paramDecl.append(entry.getValue().getClass().getName()); - paramDecl.append(" "); + paramDecl.append(' '); paramDecl.append(entry.getKey()); } return paramDecl.toString(); } @@ -3480,10 +3478,12 @@ private String makeParameterDeclarationStringObj(Map params) { if (maxTables >= 0) { query.setRange(0, maxTables); } - LOG.debug("filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString); - for (Entry entry : params.entrySet()) { - LOG.debug("key: " + entry.getKey() + " value: " + entry.getValue() + " class: " - + entry.getValue().getClass().getName()); + LOG.debug("filter specified is {}, JDOQL filter is {}", filter, queryFilterString); + if (LOG.isDebugEnabled()) { + for (Entry entry : params.entrySet()) { + LOG.debug("key: {} value: {} class: {}", entry.getKey(), entry.getValue(), + entry.getValue().getClass().getName()); + } } String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); @@ -3530,14 +3530,14 @@ private String makeParameterDeclarationStringObj(Map params) { // User specified a row limit, set it on the Query query.setRange(0, maxParts); } - LOG.debug("Filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString); - LOG.debug("Parms is " + params); + LOG.debug("Filter specified is {}, JDOQL filter is {}", filter, queryFilterString); + LOG.debug("Parms is {}", params); String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); query.setOrdering("partitionName ascending"); query.setResult("partitionName"); Collection
names = (Collection) query.executeWithMap(params); - partNames = new ArrayList<>(); + partNames = new ArrayList<>(names.size()); for (Iterator i = names.iterator(); i.hasNext();) { partNames.add((String) i.next()); } @@ -3834,7 +3834,7 @@ private boolean constraintNameAlreadyExists(String name) { } finally { rollbackAndCleanup(commited, constraintExistsQuery); } - return constraintNameIfExists != null && !constraintNameIfExists.isEmpty(); + return org.apache.commons.lang.StringUtils.isNotEmpty(constraintNameIfExists); } private String generateConstraintName(String... parameters) throws MetaException { @@ -3874,7 +3874,7 @@ private String createDbGuidAndPersist() throws MetaException { MMetastoreDBProperties prop = new MMetastoreDBProperties(); prop.setPropertykey("guid"); final String guid = UUID.randomUUID().toString(); - LOG.debug("Attempting to add a guid " + guid + " for the metastore db"); + LOG.debug("Attempting to add a guid {} for the metastore db", guid); prop.setPropertyValue(guid); prop.setDescription("Metastore DB GUID generated on " + LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"))); @@ -3910,7 +3910,7 @@ private String getGuidFromDB() throws MetaException { List uuids = new ArrayList<>(); for (Iterator i = names.iterator(); i.hasNext();) { String uuid = i.next().getPropertyValue(); - LOG.debug("Found guid " + uuid); + LOG.debug("Found guid {}", uuid); uuids.add(uuid); } success = commitTransaction(); @@ -3918,7 +3918,7 @@ private String getGuidFromDB() throws MetaException { throw new MetaException("Multiple uuids found"); } if(!uuids.isEmpty()) { - LOG.debug("Returning guid of metastore db : " + uuids.get(0)); + LOG.debug("Returning guid of metastore db : {}", uuids.get(0)); return uuids.get(0); } } finally { @@ -3933,7 +3933,7 @@ private String getGuidFromDB() throws MetaException { throws InvalidObjectException, MetaException { List fkNames = new ArrayList<>(); - if (foreignKeys.size() > 0) { + if (CollectionUtils.isNotEmpty(foreignKeys)) { List mpkfks = new ArrayList<>(); String currentConstraintName = null; // We start iterating through the foreign keys. 
This list might contain more than a single @@ -4634,48 +4634,48 @@ public boolean removeRole(String roleName) throws MetaException, // first remove all the membership, the membership that this role has // been granted List roleMap = listMRoleMembers(mRol.getRoleName()); - if (roleMap.size() > 0) { + if (CollectionUtils.isNotEmpty(roleMap)) { pm.deletePersistentAll(roleMap); } List roleMember = listMSecurityPrincipalMembershipRole(mRol .getRoleName(), PrincipalType.ROLE, queryWrapper); - if (roleMember.size() > 0) { + if (CollectionUtils.isNotEmpty(roleMember)) { pm.deletePersistentAll(roleMember); } queryWrapper.close(); // then remove all the grants List userGrants = listPrincipalMGlobalGrants( mRol.getRoleName(), PrincipalType.ROLE); - if (userGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(userGrants)) { pm.deletePersistentAll(userGrants); } List dbGrants = listPrincipalAllDBGrant(mRol .getRoleName(), PrincipalType.ROLE, queryWrapper); - if (dbGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(dbGrants)) { pm.deletePersistentAll(dbGrants); } queryWrapper.close(); List tabPartGrants = listPrincipalAllTableGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (tabPartGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tabPartGrants)) { pm.deletePersistentAll(tabPartGrants); } queryWrapper.close(); List partGrants = listPrincipalAllPartitionGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } queryWrapper.close(); List tblColumnGrants = listPrincipalAllTableColumnGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (tblColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tblColumnGrants)) { pm.deletePersistentAll(tblColumnGrants); } queryWrapper.close(); List partColumnGrants = listPrincipalAllPartitionColumnGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (partColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColumnGrants)) { pm.deletePersistentAll(partColumnGrants); } queryWrapper.close(); @@ -4892,7 +4892,7 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, openTransaction(); if (userName != null) { List user = this.listPrincipalMGlobalGrants(userName, PrincipalType.USER); - if(user.size()>0) { + if(CollectionUtils.isNotEmpty(user)) { Map> userPriv = new HashMap<>(); List grantInfos = new ArrayList<>(user.size()); for (int i = 0; i < user.size(); i++) { @@ -4905,12 +4905,12 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, ret.setUserPrivileges(userPriv); } } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> groupPriv = new HashMap<>(); for(String groupName: groupNames) { List group = this.listPrincipalMGlobalGrants(groupName, PrincipalType.GROUP); - if(group.size()>0) { + if(CollectionUtils.isNotEmpty(group)) { List grantInfos = new ArrayList<>(group.size()); for (int i = 0; i < group.size(); i++) { MGlobalPrivilege item = group.get(i); @@ -4940,7 +4940,7 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, if (principalName != null) { List userNameDbPriv = this.listPrincipalMDBGrants( principalName, principalType, dbName); - if (userNameDbPriv != null && userNameDbPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameDbPriv)) { List grantInfos = new ArrayList<>( userNameDbPriv.size()); for (int i = 0; i < userNameDbPriv.size(); i++) { @@ -4972,7 +4972,7 @@ public 
PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, PrincipalType.USER)); ret.setUserPrivileges(dbUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> dbGroupPriv = new HashMap<>(); for (String groupName : groupNames) { dbGroupPriv.put(groupName, getDBPrivilege(dbName, groupName, @@ -4981,7 +4981,7 @@ public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, ret.setGroupPrivileges(dbGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> dbRolePriv = new HashMap<>(); for (String roleName : roleNames) { dbRolePriv @@ -5015,7 +5015,7 @@ public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, tableName, partition, userName, PrincipalType.USER)); ret.setUserPrivileges(partUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> partGroupPriv = new HashMap<>(); for (String groupName : groupNames) { partGroupPriv.put(groupName, getPartitionPrivilege(dbName, tableName, @@ -5024,7 +5024,7 @@ public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, ret.setGroupPrivileges(partGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> partRolePriv = new HashMap<>(); for (String roleName : roleNames) { partRolePriv.put(roleName, getPartitionPrivilege(dbName, tableName, @@ -5058,7 +5058,7 @@ public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, tableName, userName, PrincipalType.USER)); ret.setUserPrivileges(tableUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> tableGroupPriv = new HashMap<>(); for (String groupName : groupNames) { tableGroupPriv.put(groupName, getTablePrivilege(dbName, tableName, @@ -5067,7 +5067,7 @@ public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, ret.setGroupPrivileges(tableGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> tableRolePriv = new HashMap<>(); for (String roleName : roleNames) { tableRolePriv.put(roleName, getTablePrivilege(dbName, tableName, @@ -5103,7 +5103,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, columnName, partitionName, userName, PrincipalType.USER)); ret.setUserPrivileges(columnUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> columnGroupPriv = new HashMap<>(); for (String groupName : groupNames) { columnGroupPriv.put(groupName, getColumnPrivilege(dbName, tableName, @@ -5112,7 +5112,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, ret.setGroupPrivileges(columnGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> columnRolePriv = new HashMap<>(); for (String roleName : roleNames) { columnRolePriv.put(roleName, getColumnPrivilege(dbName, tableName, @@ -5140,7 +5140,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, List userNameTabPartPriv = this .listPrincipalMPartitionGrants(principalName, principalType, dbName, tableName, partName); - if (userNameTabPartPriv != null && 
userNameTabPartPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameTabPartPriv)) { List grantInfos = new ArrayList<>( userNameTabPartPriv.size()); for (int i = 0; i < userNameTabPartPriv.size(); i++) { @@ -5169,7 +5169,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameTabPartPriv = this .listAllMTableGrants(principalName, principalType, dbName, tableName); - if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameTabPartPriv)) { List grantInfos = new ArrayList<>( userNameTabPartPriv.size()); for (int i = 0; i < userNameTabPartPriv.size(); i++) { @@ -5196,7 +5196,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameColumnPriv = this .listPrincipalMTableColumnGrants(principalName, principalType, dbName, tableName, columnName); - if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameColumnPriv)) { List grantInfos = new ArrayList<>( userNameColumnPriv.size()); for (int i = 0; i < userNameColumnPriv.size(); i++) { @@ -5211,7 +5211,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameColumnPriv = this .listPrincipalMPartitionColumnGrants(principalName, principalType, dbName, tableName, partitionName, columnName); - if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameColumnPriv)) { List grantInfos = new ArrayList<>( userNameColumnPriv.size()); for (int i = 0; i < userNameColumnPriv.size(); i++) { @@ -5237,7 +5237,7 @@ public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectExce List privilegeList = privileges.getPrivileges(); - if (privilegeList != null && privilegeList.size() > 0) { + if (CollectionUtils.isNotEmpty(privilegeList)) { Iterator privIter = privilegeList.iterator(); Set privSet = new HashSet<>(); while (privIter.hasNext()) { @@ -5429,7 +5429,7 @@ public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectExce } } } - if (persistentObjs.size() > 0) { + if (CollectionUtils.isNotEmpty(persistentObjs)) { pm.makePersistentAll(persistentObjs); } committed = commitTransaction(); @@ -5452,7 +5452,7 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) List privilegeList = privileges.getPrivileges(); - if (privilegeList != null && privilegeList.size() > 0) { + if (CollectionUtils.isNotEmpty(privilegeList)) { Iterator privIter = privilegeList.iterator(); while (privIter.hasNext()) { @@ -5668,7 +5668,7 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } } - if (persistentObjs.size() > 0) { + if (CollectionUtils.isNotEmpty(persistentObjs)) { if (grantOption) { // If grant option specified, only update the privilege, don't remove it. 
// Grant option has already been removed from the privileges in the section above @@ -5766,12 +5766,11 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) PrincipalType principalType) { List mUsers = listPrincipalMGlobalGrants(principalName, principalType); - if (mUsers.isEmpty()) { + if (CollectionUtils.isEmpty(mUsers)) { return Collections.emptyList(); } - List result = new ArrayList<>(); - for (int i = 0; i < mUsers.size(); i++) { - MGlobalPrivilege sUsr = mUsers.get(i); + List result = new ArrayList<>(mUsers.size()); + for (MGlobalPrivilege sUsr : mUsers) { HiveObjectRef objectRef = new HiveObjectRef( HiveObjectType.GLOBAL, null, null, null, null); HiveObjectPrivilege secUser = new HiveObjectPrivilege( @@ -5849,12 +5848,11 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) PrincipalType principalType, String dbName) { List mDbs = listPrincipalMDBGrants(principalName, principalType, dbName); - if (mDbs.isEmpty()) { + if (CollectionUtils.isEmpty(mDbs)) { return Collections.emptyList(); } - List result = new ArrayList<>(); - for (int i = 0; i < mDbs.size(); i++) { - MDBPrivilege sDB = mDbs.get(i); + List result = new ArrayList<>(mDbs.size()); + for (MDBPrivilege sDB : mDbs) { HiveObjectRef objectRef = new HiveObjectRef( HiveObjectType.DATABASE, dbName, null, null, null); HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, @@ -6208,12 +6206,11 @@ private void dropPartitionGrantsNoTxn(String dbName, String tableName, List mTbls = listAllMTableGrants(principalName, principalType, dbName, tableName); - if (mTbls.isEmpty()) { + if (CollectionUtils.isEmpty(mTbls)) { return Collections.emptyList(); } List result = new ArrayList<>(); - for (int i = 0; i < mTbls.size(); i++) { - MTablePrivilege sTbl = mTbls.get(i); + for (MTablePrivilege sTbl : mTbls) { HiveObjectRef objectRef = new HiveObjectRef( HiveObjectType.TABLE, dbName, tableName, null, null); HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, @@ -6337,8 +6334,7 @@ private void dropPartitionGrantsNoTxn(String dbName, String tableName, List result = new ArrayList<>(); - for (int i = 0; i < mTableCols.size(); i++) { - MTableColumnPrivilege sCol = mTableCols.get(i); + for (MTableColumnPrivilege sCol : mTableCols) { HiveObjectRef objectRef = new HiveObjectRef( HiveObjectType.COLUMN, dbName, tableName, null, sCol.getColumnName()); HiveObjectPrivilege secObj = new HiveObjectPrivilege( @@ -6398,12 +6394,11 @@ private void dropPartitionGrantsNoTxn(String dbName, String tableName, List mPartitionCols = listPrincipalMPartitionColumnGrants(principalName, principalType, dbName, tableName, partitionName, columnName); - if (mPartitionCols.isEmpty()) { + if (CollectionUtils.isEmpty(mPartitionCols)) { return Collections.emptyList(); } List result = new ArrayList<>(); - for (int i = 0; i < mPartitionCols.size(); i++) { - MPartitionColumnPrivilege sCol = mPartitionCols.get(i); + for (MPartitionColumnPrivilege sCol : mPartitionCols) { HiveObjectRef objectRef = new HiveObjectRef( HiveObjectType.COLUMN, dbName, tableName, partValues, sCol.getColumnName()); HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, @@ -6850,7 +6845,7 @@ public boolean isPartitionMarkedForEvent(String dbName, String tblName, success = commitTransaction(); LOG.debug("Done executing isPartitionMarkedForEvent"); - return (partEvents != null && !partEvents.isEmpty()) ? 
true : false; + return CollectionUtils.isNotEmpty(partEvents); } finally { rollbackAndCleanup(success, query); } @@ -7390,8 +7385,8 @@ private void writeMTableColumnStatistics(Table table, MTableColumnStatistics mSt QueryWrapper queryWrapper = new QueryWrapper(); try { - LOG.info("Updating table level column statistics for db=" + dbName + " tableName=" + tableName - + " colName=" + colName); + LOG.info("Updating table level column statistics for db={} tableName={}" + + " colName={}", dbName, tableName, colName); validateTableCols(table, Lists.newArrayList(colName)); if (oldStats != null) { @@ -7593,8 +7588,8 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List getMTableColumnStatistics(Table table, List colNames, QueryWrapper queryWrapper) throws MetaException { - if (colNames == null || colNames.isEmpty()) { - return null; + if (CollectionUtils.isEmpty(colNames)) { + return Collections.emptyList(); } boolean committed = false; @@ -7679,7 +7674,7 @@ protected ColumnStatistics getJdoResult( try { List mStats = getMTableColumnStatistics(getTable(), colNames, queryWrapper); - if (mStats.isEmpty()) return null; + if (CollectionUtils.isEmpty(mStats)) return null; // LastAnalyzed is stored per column, but thrift object has it per multiple columns. // Luckily, nobody actually uses it, so we will set to lowest value of all columns for now. ColumnStatisticsDesc desc = StatObjectConverter.getTableColumnStatisticsDesc(mStats.get(0)); @@ -7889,9 +7884,8 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName, InvalidObjectException, InvalidInputException { boolean ret = false; Query query = null; - if (dbName == null) { - dbName = Warehouse.DEFAULT_DATABASE_NAME; - } + dbName = org.apache.commons.lang.StringUtils.defaultString(dbName, + Warehouse.DEFAULT_DATABASE_NAME); if (tableName == null) { throw new InvalidInputException("Table name is null."); } @@ -7967,9 +7961,8 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName, Stri throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean ret = false; Query query = null; - if (dbName == null) { - dbName = Warehouse.DEFAULT_DATABASE_NAME; - } + dbName = org.apache.commons.lang.StringUtils.defaultString(dbName, + Warehouse.DEFAULT_DATABASE_NAME); if (tableName == null) { throw new InvalidInputException("Table name is null."); } @@ -8084,7 +8077,7 @@ public boolean addToken(String tokenId, String delegationToken) { rollbackTransaction(); } } - LOG.debug("Done executing addToken with status : " + committed); + LOG.debug("Done executing addToken with status : {}", committed); return committed && (token == null); } @@ -8106,7 +8099,7 @@ public boolean removeToken(String tokenId) { rollbackTransaction(); } } - LOG.debug("Done executing removeToken with status : " + committed); + LOG.debug("Done executing removeToken with status : {}", committed); return committed && (token != null); } @@ -8128,7 +8121,7 @@ public String getToken(String tokenId) { rollbackTransaction(); } } - LOG.debug("Done executing getToken with status : " + committed); + LOG.debug("Done executing getToken with status : {}", committed); return (null == token) ?
null : token.getTokenStr(); } @@ -8151,7 +8144,7 @@ public String getToken(String tokenId) { } return tokenIdents; } finally { - LOG.debug("Done executing getAllTokenIdentifers with status : " + committed); + LOG.debug("Done executing getAllTokenIdentifers with status : {}", committed); rollbackAndCleanup(committed, query); } } @@ -8170,7 +8163,7 @@ public int addMasterKey(String key) throws MetaException{ rollbackTransaction(); } } - LOG.debug("Done executing addMasterKey with status : " + committed); + LOG.debug("Done executing addMasterKey with status : {}", committed); if (committed) { return ((IntIdentity)pm.getObjectId(masterKey)).getKey(); } else { @@ -8197,7 +8190,7 @@ public void updateMasterKey(Integer id, String key) throws NoSuchObjectException } finally { rollbackAndCleanup(committed, query); } - LOG.debug("Done executing updateMasterKey with status : " + committed); + LOG.debug("Done executing updateMasterKey with status : {}", committed); if (null == masterKey) { throw new NoSuchObjectException("No key found with keyId: " + id); } @@ -8225,7 +8218,7 @@ public boolean removeMasterKey(Integer id) { } finally { rollbackAndCleanup(success, query); } - LOG.debug("Done executing removeMasterKey with status : " + success); + LOG.debug("Done executing removeMasterKey with status : {}", success); return (null != masterKey) && success; } @@ -8248,7 +8241,7 @@ public boolean removeMasterKey(Integer id) { } return masterKeys; } finally { - LOG.debug("Done executing getMasterKeys with status : " + committed); + LOG.debug("Done executing getMasterKeys with status : {}", committed); rollbackAndCleanup(committed, query); } } @@ -8282,18 +8275,17 @@ private synchronized void checkSchema() throws MetaException { if (dbSchemaVer == null) { if (strictValidation) { - throw new MetaException("Version information not found in metastore. "); + throw new MetaException("Version information not found in metastore."); } else { - LOG.warn("Version information not found in metastore. " - + ConfVars.SCHEMA_VERIFICATION.toString() + - " is not enabled so recording the schema version " + + LOG.warn("Version information not found in metastore. {} is not " + + "enabled so recording the schema version {}", ConfVars.SCHEMA_VERIFICATION, hiveSchemaVer); setMetaStoreSchemaVersion(hiveSchemaVer, "Set by MetaStore " + USER + "@" + HOSTNAME); } } else { if (metastoreSchemaInfo.isVersionCompatible(hiveSchemaVer, dbSchemaVer)) { - LOG.debug("Found expected HMS version of " + dbSchemaVer); + LOG.debug("Found expected HMS version of {}", dbSchemaVer); } else { // metastore schema version is different than Hive distribution needs if (strictValidation) { @@ -8301,9 +8293,9 @@ private synchronized void checkSchema() throws MetaException { " does not match metastore's schema version " + dbSchemaVer + " Metastore is not upgraded or corrupt"); } else { - LOG.error("Version information found in metastore differs " + dbSchemaVer + - " from expected schema version " + hiveSchemaVer + - ". Schema verififcation is disabled " + ConfVars.SCHEMA_VERIFICATION); + LOG.error("Version information found in metastore differs {} " + + "from expected schema version {}. 
Schema verififcation is disabled {}", + dbSchemaVer, hiveSchemaVer, ConfVars.SCHEMA_VERIFICATION); setMetaStoreSchemaVersion(hiveSchemaVer, "Set by MetaStore " + USER + "@" + HOSTNAME); } @@ -8372,10 +8364,10 @@ public void setMetaStoreSchemaVersion(String schemaVersion, String comment) thro MetastoreConf.getBoolVar(getConf(), ConfVars.SCHEMA_VERIFICATION_RECORD_VERSION); if (!recordVersion) { LOG.warn("setMetaStoreSchemaVersion called but recording version is disabled: " + - "version = " + schemaVersion + ", comment = " + comment); + "version = {}, comment = {}", schemaVersion, comment); return; } - LOG.warn("Setting metastore schema version in db to " + schemaVersion); + LOG.warn("Setting metastore schema version in db to {}", schemaVersion); try { mSchemaVer = getMSchemaVersion(); @@ -8409,7 +8401,7 @@ public boolean doesPartitionExist(String dbName, String tableName, List private void debugLog(String message) { if (LOG.isDebugEnabled()) { - LOG.debug(message + getCallStack()); + LOG.debug("{} {}", message, getCallStack()); } } @@ -8463,7 +8455,7 @@ private MFunction convertToMFunction(Function func) throws InvalidObjectExceptio try { mdb = getMDatabase(func.getDbName()); } catch (NoSuchObjectException e) { - LOG.error(StringUtils.stringifyException(e)); + LOG.error("Database does not exist", e); throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist."); } @@ -8725,8 +8717,8 @@ public void run() throws MetaException { break; } catch (Exception e) { LOG.info( - "Attempting to acquire the DB log notification lock: " + currentRetries + " out of " - + maxRetries + " retries", e); + "Attempting to acquire the DB log notification lock: {} out of {}" + + " retries", currentRetries, maxRetries, e); if (currentRetries >= maxRetries) { String message = "Couldn't acquire the DB log notification lock because we reached the maximum" @@ -8765,7 +8757,7 @@ public void addNotificationEvent(NotificationEvent entry) { Collection ids = (Collection) objectQuery.execute(); MNotificationNextId mNotificationNextId = null; boolean needToPersistId; - if (ids == null || ids.size() == 0) { + if (CollectionUtils.isEmpty(ids)) { mNotificationNextId = new MNotificationNextId(1L); needToPersistId = true; } else { @@ -8797,7 +8789,7 @@ public void cleanNotificationEvents(int olderThan) { query = pm.newQuery(MNotificationLog.class, "eventTime < tooOld"); query.declareParameters("java.lang.Integer tooOld"); Collection toBeRemoved = (Collection) query.execute(tooOld); - if (toBeRemoved != null && toBeRemoved.size() > 0) { + if (CollectionUtils.isNotEmpty(toBeRemoved)) { pm.deletePersistentAll(toBeRemoved); } commited = commitTransaction(); @@ -8815,7 +8807,7 @@ public CurrentNotificationEventId getCurrentNotificationEventId() { query = pm.newQuery(MNotificationNextId.class); Collection ids = (Collection) query.execute(); long id = 0; - if (ids != null && ids.size() > 0) { + if (CollectionUtils.isNotEmpty(ids)) { id = ids.iterator().next().getNextEventId() - 1; } commited = commitTransaction(); @@ -8977,7 +8969,7 @@ private static void clearOutPmfClassLoaderCache(PersistenceManagerFactory pmf) { classLoaderResolverMap.set(nc, new HashMap()); LOG.debug("Removed cached classloaders from DataNucleus NucleusContext"); } catch (Exception e) { - LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext ", e); + LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext", e); } } @@ -8988,8 +8980,8 @@ private static void clearClr(ClassLoaderResolver clr) throws 
Exception { long resourcesCleared = clearFieldMap(clri,"resources"); long loadedClassesCleared = clearFieldMap(clri,"loadedClasses"); long unloadedClassesCleared = clearFieldMap(clri, "unloadedClasses"); - LOG.debug("Cleared ClassLoaderResolverImpl: " + - resourcesCleared + "," + loadedClassesCleared + "," + unloadedClassesCleared); + LOG.debug("Cleared ClassLoaderResolverImpl: {}, {}, {}", + resourcesCleared, loadedClassesCleared, unloadedClassesCleared); } } } @@ -9370,7 +9362,7 @@ public void dropConstraint(String dbName, String tableName, List tabConstraints = listAllTableConstraintsWithOptionalConstraintName( dbName, tableName, constraintName); - if (tabConstraints != null && tabConstraints.size() > 0) { + if (CollectionUtils.isNotEmpty(tabConstraints)) { pm.deletePersistentAll(tabConstraints); } else { throw new NoSuchObjectException("The constraint: " + constraintName +
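Reviewer note (not part of the patch): most of the diff converts string-concatenated log calls to SLF4J parameterized logging. The sketch below is a minimal illustration of that idiom, assuming only the org.slf4j API that ObjectStore already obtains via LoggerFactory; the class name, messages, and arguments are hypothetical.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  void concatenated(String dbName, int tries) {
    // Builds the message string even when DEBUG is disabled.
    LOG.debug("Dropping database " + dbName + " with " + tries + " tries left");
  }

  void parameterized(String dbName, int tries, Exception e) {
    // {} placeholders defer formatting until the level check passes.
    LOG.debug("Dropping database {} with {} tries left", dbName, tries);
    // A trailing Throwable with no matching placeholder is logged as a stack trace,
    // which the retry loop in initialize() and the getDatabase() warning rely on.
    LOG.info("Retriable exception, {} tries left", tries, e);
  }
}

This is also why most LOG.isDebugEnabled() guards can go away; the patch keeps a guard only where computing the arguments is itself costly, for example getCallStack() in debugLog() and the per-parameter dump loop added around the table-name filter query.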
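Reviewer note (not part of the patch): the repeated "list != null && list.size() > 0" and "list.isEmpty()" checks are collapsed into commons-collections CollectionUtils, which the diff imports near the top of the file. A minimal sketch of the two helpers, with hypothetical method and variable names:

import java.util.Collections;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

public class CollectionCheckSketch {
  // Before: if (grants != null && grants.size() > 0) { deleteAll(grants); }
  void dropGrants(List<String> grants) {
    if (CollectionUtils.isNotEmpty(grants)) {  // null-safe and empty-safe
      grants.clear();                          // stand-in for the JDO deletePersistentAll call
    }
  }

  // Before: if (partNames.isEmpty()) { return new ArrayList<>(); }
  List<String> filterNames(List<String> partNames) {
    if (CollectionUtils.isEmpty(partNames)) {
      return Collections.emptyList();          // shared immutable instance, no allocation
    }
    return partNames;
  }
}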
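Reviewer note (not part of the patch): manual null/empty/blank string checks are replaced with org.apache.commons.lang.StringUtils, written fully qualified in the diff, presumably to avoid clashing with the Hadoop StringUtils this class already imports. A sketch of the three helpers the patch leans on, with hypothetical inputs:

import org.apache.commons.lang.StringUtils;

public class StringCheckSketch {
  // defaultString(s, fallback) substitutes the fallback only when s is null,
  // as in the USER initializer and the dbName defaults in the delete*ColumnStatistics methods.
  static final String USER = StringUtils.defaultString(System.getenv("USER"), "UNKNOWN");

  // isNotEmpty(s) == (s != null && !s.isEmpty()); used for filters, schemas and passwords.
  static boolean hasFilter(String filter) {
    return StringUtils.isNotEmpty(filter);
  }

  // isBlank(s) also treats whitespace-only values as missing, matching the old
  // (type == null || type.trim().isEmpty()) check on the database owner type.
  static boolean ownerTypeMissing(String type) {
    return StringUtils.isBlank(type);
  }
}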
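Reviewer note (not part of the patch): the privilege-conversion loops also move from index-based iteration to enhanced for loops, and result lists are pre-sized where the final element count is known. A simplified sketch with plain strings standing in for the M*Privilege model classes:

import java.util.ArrayList;
import java.util.List;

public class ConversionLoopSketch {
  List<String> convert(List<String> grants) {
    // Pre-sizing avoids intermediate array growth; the enhanced for loop replaces
    // the old "for (int i = 0; i < grants.size(); i++) { get(i); }" pattern.
    List<String> result = new ArrayList<>(grants.size());
    for (String grant : grants) {
      result.add("privilege:" + grant);
    }
    return result;
  }
}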