.../apache/hadoop/hive/metastore/ObjectStore.java | 275 ++++++++++----------- 1 file changed, 137 insertions(+), 138 deletions(-) diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 7334a0c..1683884 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -80,6 +80,8 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; + +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.hadoop.classification.InterfaceAudience; @@ -228,7 +230,7 @@ * Verify the schema only once per JVM since the db connection info is static */ private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false); - private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class); private enum TXN_STATUS { NO_STATE, OPEN, COMMITED, ROLLBACK @@ -257,11 +259,7 @@ } HOSTNAME = hostname; String user = System.getenv("USER"); - if (user == null) { - USER = "UNKNOWN"; - } else { - USER = user; - } + USER = org.apache.commons.lang.StringUtils.defaultString(user, "UNKNOWN"); } @@ -398,13 +396,13 @@ private void initialize(Properties dsProps) { numTries--; boolean retriable = isRetriableException(e); if ((numTries > 0) && retriable){ - LOG.info("Retriable exception while instantiating ObjectStore, retrying. " - + numTries + " tries left", e); + LOG.info("Retriable exception while instantiating ObjectStore, retrying. " + + "{} tries left", numTries, e); try { Thread.sleep(retryInterval); } catch (InterruptedException ie) { // Restore the interrupted status, since we do not want to catch it. - LOG.debug("Interrupted while sleeping before retrying.",ie); + LOG.debug("Interrupted while sleeping before retrying.", ie); Thread.currentThread().interrupt(); } // If we're here, we'll proceed down the next while loop iteration. @@ -475,8 +473,8 @@ private void initializeHelper(Properties dsProps) { directSql = new MetaStoreDirectSql(pm, conf, schema); } } - LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + - " created in the thread with id: " + Thread.currentThread().getId()); + LOG.debug("RawStore: {}, with PersistenceManager: {}" + + " created in the thread with id: {}", this, pm, Thread.currentThread().getId()); } private DatabaseProduct determineDatabaseProduct() { @@ -534,7 +532,7 @@ private static void configureSSL(Configuration conf) { if (pair != null && pair.length == 2) { System.setProperty(pair[0].trim(), pair[1].trim()); } else { - LOG.warn("Invalid metastore property value for " + ConfVars.DBACCESS_SSL_PROPS); + LOG.warn("Invalid metastore property value for {}", ConfVars.DBACCESS_SSL_PROPS); } } } @@ -553,10 +551,11 @@ private static Properties getDataSourceProps(Configuration conf) { // has to be a separate first step because we don't set the default values in the config object. 
for (ConfVars var : MetastoreConf.dataNucleusAndJdoConfs) { String confVal = MetastoreConf.getAsString(conf, var); - Object prevVal = prop.setProperty(var.getVarname(), confVal); - if (LOG.isDebugEnabled() && MetastoreConf.isPrintable(var.getVarname())) { - LOG.debug("Overriding " + var.getVarname() + " value " + prevVal - + " from jpox.properties with " + confVal); + String varName = var.getVarname(); + Object prevVal = prop.setProperty(varName, confVal); + if (MetastoreConf.isPrintable(varName)) { + LOG.debug("Overriding {} value {} from jpox.properties with {}", + varName, prevVal, confVal); } } @@ -592,7 +591,7 @@ private static Properties getDataSourceProps(Configuration conf) { if (LOG.isDebugEnabled()) { for (Entry e : prop.entrySet()) { if (MetastoreConf.isPrintable(e.getKey().toString())) { - LOG.debug(e.getKey() + " = " + e.getValue()); + LOG.debug("{} = {}", e.getKey(), e.getValue()); } } } @@ -644,7 +643,7 @@ private static synchronized PersistenceManagerFactory getPMF() { DataStoreCache dsc = pmf.getDataStoreCache(); if (dsc != null) { String objTypes = MetastoreConf.getVar(conf, ConfVars.CACHE_PINOBJTYPES); - LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"" + objTypes + "\""); + LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"{}\"", objTypes); if (objTypes != null && objTypes.length() > 0) { objTypes = objTypes.toLowerCase(); String[] typeTokens = objTypes.split(","); @@ -654,7 +653,7 @@ private static synchronized PersistenceManagerFactory getPMF() { dsc.pinAll(true, PINCLASSMAP.get(type)); } else { - LOG.warn(type + " is not one of the pinnable object types: " + org.apache.commons.lang.StringUtils.join(PINCLASSMAP.keySet(), " ")); + LOG.warn("{} is not one of the pinnable object types: {}", type, org.apache.commons.lang.StringUtils.join(PINCLASSMAP.keySet(), " ")); } } } @@ -673,9 +672,8 @@ public PersistenceManager getPersistenceManager() { @Override public void shutdown() { + LOG.debug("RawStore: {}, with PersistenceManager: {} will be shutdown", this, pm); if (pm != null) { - LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm + - " will be shutdown"); pm.close(); pm = null; } @@ -842,7 +840,7 @@ public Database getDatabase(String name) throws NoSuchObjectException { ex = e; } if (db == null) { - LOG.warn("Failed to get database " + name +", returning NoSuchObjectException", ex); + LOG.warn("Failed to get database {}, returning NoSuchObjectException", name, ex); throw new NoSuchObjectException(name + (ex == null ? 
"" : (": " + ex.getMessage()))); } return db; @@ -927,7 +925,7 @@ public boolean alterDatabase(String dbName, Database db) @Override public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException { boolean success = false; - LOG.info("Dropping database " + dbname + " along with all tables"); + LOG.info("Dropping database {} along with all tables", dbname); dbname = normalizeIdentifier(dbname); QueryWrapper queryWrapper = new QueryWrapper(); try { @@ -938,7 +936,7 @@ public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaExc pm.retrieve(db); if (db != null) { List dbGrants = this.listDatabaseGrants(dbname, queryWrapper); - if (dbGrants != null && dbGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(dbGrants)) { pm.deletePersistentAll(dbGrants); } pm.deletePersistent(db); @@ -1086,7 +1084,7 @@ public boolean dropType(String typeName) { success = commitTransaction(); } catch (JDOObjectNotFoundException e) { success = commitTransaction(); - LOG.debug("type not found " + typeName, e); + LOG.debug("type not found {}", typeName, e); } finally { rollbackAndCleanup(success, query); } @@ -1194,36 +1192,36 @@ public boolean dropTable(String dbName, String tableName) throws MetaException, if (tbl != null) { // first remove all the grants List tabGrants = listAllTableGrants(dbName, tableName); - if (tabGrants != null && tabGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tabGrants)) { pm.deletePersistentAll(tabGrants); } List tblColGrants = listTableAllColumnGrants(dbName, tableName); - if (tblColGrants != null && tblColGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tblColGrants)) { pm.deletePersistentAll(tblColGrants); } List partGrants = this.listTableAllPartitionGrants(dbName, tableName); - if (partGrants != null && partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } List partColGrants = listTableAllPartitionColumnGrants(dbName, tableName); - if (partColGrants != null && partColGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColGrants)) { pm.deletePersistentAll(partColGrants); } // delete column statistics if present try { deleteTableColumnStatistics(dbName, tableName, null); } catch (NoSuchObjectException e) { - LOG.info("Found no table level column statistics associated with db " + dbName + - " table " + tableName + " record to delete"); + LOG.info("Found no table level column statistics associated with db {}" + + " table {} record to delete", dbName, tableName); } List tabConstraints = listAllTableConstraintsWithOptionalConstraintName( dbName, tableName, null); - if (tabConstraints != null && tabConstraints.size() > 0) { + if (CollectionUtils.isNotEmpty(tabConstraints)) { pm.deletePersistentAll(tabConstraints); } @@ -1861,7 +1859,7 @@ public boolean addPartitions(String dbName, String tblName, List part } } } - if (toPersist.size() > 0) { + if (CollectionUtils.isNotEmpty(toPersist)) { pm.makePersistentAll(toPersist); pm.flush(); } @@ -1984,7 +1982,7 @@ public boolean addPartition(Partition part) throws InvalidObjectException, toPersist.add(partColumn); } - if (toPersist.size() > 0) { + if (CollectionUtils.isNotEmpty(toPersist)) { pm.makePersistentAll(toPersist); } } @@ -2042,7 +2040,7 @@ private MPartition getMPartition(String dbName, String tableName, List p // We need to compare partition name with requested name since some DBs // (like MySQL, Derby) considers 'a' = 'a ' whereas others like (Postgres, // Oracle) doesn't exhibit this problem. 
- if (mparts != null && mparts.size() > 0) { + if (CollectionUtils.isNotEmpty(mparts)) { if (mparts.size() > 1) { throw new MetaException( "Expecting only one partition but more than one partitions are found."); @@ -2199,7 +2197,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio part.getTable().getTableName(), Lists.newArrayList(partName)); - if (partGrants != null && partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } @@ -2207,7 +2205,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio part.getTable().getDatabase().getName(), part.getTable().getTableName(), Lists.newArrayList(partName)); - if (partColumnGrants != null && partColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColumnGrants)) { pm.deletePersistentAll(partColumnGrants); } @@ -2272,7 +2270,7 @@ private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectExceptio openTransaction(); List mparts = listMPartitions(dbName, tblName, max, queryWrapper); List parts = new ArrayList<>(mparts.size()); - if (mparts != null && mparts.size()>0) { + if (CollectionUtils.isNotEmpty(mparts)) { for (MPartition mpart : mparts) { MTable mtbl = mpart.getTable(); Partition part = convertToPart(mpart); @@ -2425,13 +2423,13 @@ public PartitionValuesResponse listPartitionValues(String dbName, String tableNa if (filter == null || filter.isEmpty()) { PartitionValuesResponse response = getDistinctValuesForPartitionsNoTxn(dbName, tableName, cols, applyDistinct, ascending, maxParts); - LOG.info("Number of records fetched: " + response.getPartitionValues().size()); + LOG.info("Number of records fetched: {}", response.getPartitionValues().size()); return response; } else { PartitionValuesResponse response = extractPartitionNamesByFilter(dbName, tableName, filter, cols, ascending, applyDistinct, maxParts); if (response != null && response.getPartitionValues() != null) { - LOG.info("Number of records fetched with filter: " + response.getPartitionValues().size()); + LOG.info("Number of records fetched with filter: {}", response.getPartitionValues().size()); } return response; } @@ -2446,8 +2444,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str List cols, boolean ascending, boolean applyDistinct, long maxParts) throws MetaException, NoSuchObjectException { - LOG.info("Database: " + dbName + " Table:" + tableName + " filter\"" + filter + "\" cols:" + cols); - List partitionResults = new ArrayList(); + LOG.info("Database: {} Table: {} filter: \"{}\" cols: {}", dbName, tableName, filter, cols); List partitionNames = null; List partitions = null; Table tbl = getTable(dbName, tableName); @@ -2455,7 +2452,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str // Get partitions by name - ascending or descending partitionNames = getPartitionNamesByFilter(dbName, tableName, filter, ascending, maxParts); } catch (MetaException e) { - LOG.warn("Querying by partition names failed, trying out with partition objects, filter:" + filter); + LOG.warn("Querying by partition names failed, trying out with partition objects, filter: {}", filter); } if (partitionNames == null) { @@ -2484,7 +2481,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str // Return proper response PartitionValuesResponse response = new PartitionValuesResponse(); response.setPartitionValues(new ArrayList(partitionNames.size())); - LOG.info("Converting responses 
to Partition values for items:" + partitionNames.size()); + LOG.info("Converting responses to Partition values for items: {}", partitionNames.size()); for (String partName : partitionNames) { ArrayList vals = new ArrayList(tbl.getPartitionKeys().size()); for (FieldSchema key : tbl.getPartitionKeys()) { @@ -2529,9 +2526,10 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str query.setRange(0, maxParts); } - LOG.debug("Filter specified is " + filter + "," + - " JDOQL filter is " + queryFilterString); - LOG.debug("Parms is " + params); + LOG.debug("Filter specified is {}, JDOQL filter is {}", filter, + queryFilterString); + + LOG.debug("Parms is {}", params); String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); @@ -2550,7 +2548,7 @@ private PartitionValuesResponse extractPartitionNamesByFilter(String dbName, Str LOG.debug("Done executing query for getPartitionNamesByFilter"); success = commitTransaction(); - LOG.debug("Done retrieving all objects for getPartitionNamesByFilter, size:" + partNames.size()); + LOG.debug("Done retrieving all objects for getPartitionNamesByFilter, size: {}", partNames.size()); query.closeAll(); } finally { if (!success) { @@ -2777,7 +2775,7 @@ private Collection getPartitionPsQueryResults(String dbName, String tableName, LOG.debug("Done executing query for listMPartitions"); pm.retrieveAll(mparts); success = commitTransaction(); - LOG.debug("Done retrieving all objects for listMPartitions " + mparts); + LOG.debug("Done retrieving all objects for listMPartitions {}", mparts); } finally { if (!success) { rollbackTransaction(); @@ -2973,7 +2971,7 @@ private void dropPartitionsNoTxn(String dbName, String tblName, List par Query query = queryWithParams.getFirst(); query.setClass(MPartition.class); long deleted = query.deletePersistentAll(queryWithParams.getSecond()); - LOG.debug("Deleted " + deleted + " partition from store"); + LOG.debug("Deleted {} partition from store", deleted); query.closeAll(); } @@ -3023,7 +3021,7 @@ private void dropPartitionsNoTxn(String dbName, String tblName, List par sb.append(')'); Query query = pm.newQuery(); query.setFilter(sb.toString()); - LOG.debug(" JDOQL filter is " + sb.toString()); + LOG.debug(" JDOQL filter is {}", sb); params.put("t1", normalizeIdentifier(tblName)); params.put("t2", normalizeIdentifier(dbName)); query.declareParameters(makeParameterDeclarationString(params)); @@ -3209,8 +3207,11 @@ private String generateShorterMessage(Exception ex) { private T commit() { success = commitTransaction(); if (doTrace) { - LOG.debug(describeResult() + " retrieved using " + (doUseDirectSql ? "SQL" : "ORM") - + " in " + ((System.nanoTime() - start) / 1000000.0) + "ms"); + double time = ((System.nanoTime() - start) / 1000000.0); + String result = describeResult(); + String retrieveType = doUseDirectSql ? "SQL" : "ORM"; + + LOG.debug("{} retrieved using {} in {}ms", result, retrieveType, time); } return results; } @@ -3254,7 +3255,7 @@ public GetDbHelper( @Override protected String describeResult() { - return "db details for db " + dbName; + return "db details for db ".concat(dbName); } } @@ -3273,7 +3274,7 @@ protected String describeResult() { @Override public int getNumPartitionsByFilter(String dbName, String tblName, String filter) throws MetaException, NoSuchObjectException { - final ExpressionTree exprTree = (filter != null && !filter.isEmpty()) + final ExpressionTree exprTree = org.apache.commons.lang.StringUtils.isEmpty(filter) ? 
PartFilterExprUtil.getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE; return new GetHelper(dbName, tblName, true, true) { @@ -3441,11 +3442,11 @@ private String makeQueryFilterString(String dbName, Table table, ExpressionTree tree.generateJDOFilterFragment(getConf(), table, params, queryBuilder); if (queryBuilder.hasError()) { assert !isValidatedFilter; - LOG.info("JDO filter pushdown cannot be used: " + queryBuilder.getErrorMessage()); + LOG.info("JDO filter pushdown cannot be used: {}", queryBuilder.getErrorMessage()); return null; } String jdoFilter = queryBuilder.getFilter(); - LOG.debug("jdoFilter = " + jdoFilter); + LOG.debug("jdoFilter = {}", jdoFilter); return jdoFilter; } @@ -3464,7 +3465,7 @@ private String makeParameterDeclarationStringObj(Map params) { for (Entry entry : params.entrySet()) { paramDecl.append(", "); paramDecl.append(entry.getValue().getClass().getName()); - paramDecl.append(" "); + paramDecl.append(' '); paramDecl.append(entry.getKey()); } return paramDecl.toString(); @@ -3489,10 +3490,12 @@ private String makeParameterDeclarationStringObj(Map params) { if (maxTables >= 0) { query.setRange(0, maxTables); } - LOG.debug("filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString); - for (Entry entry : params.entrySet()) { - LOG.debug("key: " + entry.getKey() + " value: " + entry.getValue() + " class: " - + entry.getValue().getClass().getName()); + LOG.debug("filter specified is {}, JDOQL filter is {}", filter, queryFilterString); + if (LOG.isDebugEnabled()) { + for (Entry entry : params.entrySet()) { + LOG.debug("key: {} value: {} class: {}", entry.getKey(), entry.getValue(), + entry.getValue().getClass().getName()); + } } String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); @@ -3539,14 +3542,14 @@ private String makeParameterDeclarationStringObj(Map params) { // User specified a row limit, set it on the Query query.setRange(0, maxParts); } - LOG.debug("Filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString); - LOG.debug("Parms is " + params); + LOG.debug("Filter specified is {}, JDOQL filter is {}", filter, queryFilterString); + LOG.debug("Parms is {}", params); String parameterDeclaration = makeParameterDeclarationStringObj(params); query.declareParameters(parameterDeclaration); query.setOrdering("partitionName ascending"); query.setResult("partitionName"); Collection names = (Collection) query.executeWithMap(params); - partNames = new ArrayList<>(); + partNames = new ArrayList<>(names.size()); for (Iterator i = names.iterator(); i.hasNext();) { partNames.add((String) i.next()); } @@ -3805,8 +3808,7 @@ private static MFieldSchema getColumnFromTableColumns(List cols, S if (cols == null) { return null; } - for (int i = 0; i < cols.size(); i++) { - MFieldSchema mfs = cols.get(i); + for (MFieldSchema mfs : cols) { if (mfs.getName().equalsIgnoreCase(col)) { return mfs; } @@ -3883,18 +3885,18 @@ private String createDbGuidAndPersist() throws MetaException { MMetastoreDBProperties prop = new MMetastoreDBProperties(); prop.setPropertykey("guid"); final String guid = UUID.randomUUID().toString(); - LOG.debug("Attempting to add a guid " + guid + " for the metastore db"); + LOG.debug("Attempting to add a guid {} for the metastore db", guid); prop.setPropertyValue(guid); prop.setDescription("Metastore DB GUID generated on " + LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"))); pm.makePersistent(prop); success = 
commitTransaction(); if (success) { - LOG.info("Metastore db guid " + guid + " created successfully"); + LOG.info("Metastore db guid {} created successfully", guid); return guid; } } catch (Exception e) { - LOG.warn(e.getMessage(), e); + LOG.warn("Metastore db guid creation failed", e); } finally { rollbackAndCleanup(success, query); } @@ -3919,7 +3921,7 @@ private String getGuidFromDB() throws MetaException { List uuids = new ArrayList<>(); for (Iterator i = names.iterator(); i.hasNext();) { String uuid = i.next().getPropertyValue(); - LOG.debug("Found guid " + uuid); + LOG.debug("Found guid {}", uuid); uuids.add(uuid); } success = commitTransaction(); @@ -3927,7 +3929,7 @@ private String getGuidFromDB() throws MetaException { throw new MetaException("Multiple uuids found"); } if(!uuids.isEmpty()) { - LOG.debug("Returning guid of metastore db : " + uuids.get(0)); + LOG.debug("Returning guid of metastore db : {}", uuids.get(0)); return uuids.get(0); } } finally { @@ -3942,7 +3944,7 @@ private String getGuidFromDB() throws MetaException { throws InvalidObjectException, MetaException { List fkNames = new ArrayList<>(); - if (foreignKeys.size() > 0) { + if (CollectionUtils.isNotEmpty(foreignKeys)) { List mpkfks = new ArrayList<>(); String currentConstraintName = null; // We start iterating through the foreign keys. This list might contain more than a single @@ -4643,48 +4645,48 @@ public boolean removeRole(String roleName) throws MetaException, // first remove all the membership, the membership that this role has // been granted List roleMap = listMRoleMembers(mRol.getRoleName()); - if (roleMap.size() > 0) { + if (CollectionUtils.isNotEmpty(roleMap)) { pm.deletePersistentAll(roleMap); } List roleMember = listMSecurityPrincipalMembershipRole(mRol .getRoleName(), PrincipalType.ROLE, queryWrapper); - if (roleMember.size() > 0) { + if (CollectionUtils.isNotEmpty(roleMember)) { pm.deletePersistentAll(roleMember); } queryWrapper.close(); // then remove all the grants List userGrants = listPrincipalMGlobalGrants( mRol.getRoleName(), PrincipalType.ROLE); - if (userGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(userGrants)) { pm.deletePersistentAll(userGrants); } List dbGrants = listPrincipalAllDBGrant(mRol .getRoleName(), PrincipalType.ROLE, queryWrapper); - if (dbGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(dbGrants)) { pm.deletePersistentAll(dbGrants); } queryWrapper.close(); List tabPartGrants = listPrincipalAllTableGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (tabPartGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tabPartGrants)) { pm.deletePersistentAll(tabPartGrants); } queryWrapper.close(); List partGrants = listPrincipalAllPartitionGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (partGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partGrants)) { pm.deletePersistentAll(partGrants); } queryWrapper.close(); List tblColumnGrants = listPrincipalAllTableColumnGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (tblColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(tblColumnGrants)) { pm.deletePersistentAll(tblColumnGrants); } queryWrapper.close(); List partColumnGrants = listPrincipalAllPartitionColumnGrants( mRol.getRoleName(), PrincipalType.ROLE, queryWrapper); - if (partColumnGrants.size() > 0) { + if (CollectionUtils.isNotEmpty(partColumnGrants)) { pm.deletePersistentAll(partColumnGrants); } queryWrapper.close(); @@ -4901,7 +4903,7 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, 
openTransaction(); if (userName != null) { List user = this.listPrincipalMGlobalGrants(userName, PrincipalType.USER); - if(user.size()>0) { + if(CollectionUtils.isNotEmpty(user)) { Map> userPriv = new HashMap<>(); List grantInfos = new ArrayList<>(user.size()); for (int i = 0; i < user.size(); i++) { @@ -4914,12 +4916,12 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, ret.setUserPrivileges(userPriv); } } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> groupPriv = new HashMap<>(); for(String groupName: groupNames) { List group = this.listPrincipalMGlobalGrants(groupName, PrincipalType.GROUP); - if(group.size()>0) { + if(CollectionUtils.isNotEmpty(group)) { List grantInfos = new ArrayList<>(group.size()); for (int i = 0; i < group.size(); i++) { MGlobalPrivilege item = group.get(i); @@ -4949,7 +4951,7 @@ public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, if (principalName != null) { List userNameDbPriv = this.listPrincipalMDBGrants( principalName, principalType, dbName); - if (userNameDbPriv != null && userNameDbPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameDbPriv)) { List grantInfos = new ArrayList<>( userNameDbPriv.size()); for (int i = 0; i < userNameDbPriv.size(); i++) { @@ -4981,7 +4983,7 @@ public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, PrincipalType.USER)); ret.setUserPrivileges(dbUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> dbGroupPriv = new HashMap<>(); for (String groupName : groupNames) { dbGroupPriv.put(groupName, getDBPrivilege(dbName, groupName, @@ -4990,7 +4992,7 @@ public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, ret.setGroupPrivileges(dbGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> dbRolePriv = new HashMap<>(); for (String roleName : roleNames) { dbRolePriv @@ -5024,7 +5026,7 @@ public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, tableName, partition, userName, PrincipalType.USER)); ret.setUserPrivileges(partUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> partGroupPriv = new HashMap<>(); for (String groupName : groupNames) { partGroupPriv.put(groupName, getPartitionPrivilege(dbName, tableName, @@ -5033,7 +5035,7 @@ public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, ret.setGroupPrivileges(partGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> partRolePriv = new HashMap<>(); for (String roleName : roleNames) { partRolePriv.put(roleName, getPartitionPrivilege(dbName, tableName, @@ -5067,7 +5069,7 @@ public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, tableName, userName, PrincipalType.USER)); ret.setUserPrivileges(tableUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> tableGroupPriv = new HashMap<>(); for (String groupName : groupNames) { tableGroupPriv.put(groupName, getTablePrivilege(dbName, tableName, @@ -5076,7 +5078,7 @@ public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, ret.setGroupPrivileges(tableGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && 
roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> tableRolePriv = new HashMap<>(); for (String roleName : roleNames) { tableRolePriv.put(roleName, getTablePrivilege(dbName, tableName, @@ -5112,7 +5114,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, columnName, partitionName, userName, PrincipalType.USER)); ret.setUserPrivileges(columnUserPriv); } - if (groupNames != null && groupNames.size() > 0) { + if (CollectionUtils.isNotEmpty(groupNames)) { Map> columnGroupPriv = new HashMap<>(); for (String groupName : groupNames) { columnGroupPriv.put(groupName, getColumnPrivilege(dbName, tableName, @@ -5121,7 +5123,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, ret.setGroupPrivileges(columnGroupPriv); } Set roleNames = listAllRolesInHierarchy(userName, groupNames); - if (roleNames != null && roleNames.size() > 0) { + if (CollectionUtils.isNotEmpty(roleNames)) { Map> columnRolePriv = new HashMap<>(); for (String roleName : roleNames) { columnRolePriv.put(roleName, getColumnPrivilege(dbName, tableName, @@ -5149,7 +5151,7 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, List userNameTabPartPriv = this .listPrincipalMPartitionGrants(principalName, principalType, dbName, tableName, partName); - if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameTabPartPriv)) { List grantInfos = new ArrayList<>( userNameTabPartPriv.size()); for (int i = 0; i < userNameTabPartPriv.size(); i++) { @@ -5178,7 +5180,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameTabPartPriv = this .listAllMTableGrants(principalName, principalType, dbName, tableName); - if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameTabPartPriv)) { List grantInfos = new ArrayList<>( userNameTabPartPriv.size()); for (int i = 0; i < userNameTabPartPriv.size(); i++) { @@ -5205,7 +5207,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameColumnPriv = this .listPrincipalMTableColumnGrants(principalName, principalType, dbName, tableName, columnName); - if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameColumnPriv)) { List grantInfos = new ArrayList<>( userNameColumnPriv.size()); for (int i = 0; i < userNameColumnPriv.size(); i++) { @@ -5220,7 +5222,7 @@ private PrincipalType getPrincipalTypeFromStr(String str) { List userNameColumnPriv = this .listPrincipalMPartitionColumnGrants(principalName, principalType, dbName, tableName, partitionName, columnName); - if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) { + if (CollectionUtils.isNotEmpty(userNameColumnPriv)) { List grantInfos = new ArrayList<>( userNameColumnPriv.size()); for (int i = 0; i < userNameColumnPriv.size(); i++) { @@ -5246,7 +5248,7 @@ public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectExce List privilegeList = privileges.getPrivileges(); - if (privilegeList != null && privilegeList.size() > 0) { + if (CollectionUtils.isNotEmpty(privilegeList)) { Iterator privIter = privilegeList.iterator(); Set privSet = new HashSet<>(); while (privIter.hasNext()) { @@ -5438,7 +5440,7 @@ public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectExce } } } - if (persistentObjs.size() > 0) { + if (CollectionUtils.isNotEmpty(persistentObjs)) { pm.makePersistentAll(persistentObjs); } committed = commitTransaction(); @@ -5461,7 +5463,7 @@ public 
boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) List privilegeList = privileges.getPrivileges(); - if (privilegeList != null && privilegeList.size() > 0) { + if (CollectionUtils.isNotEmpty(privilegeList)) { Iterator privIter = privilegeList.iterator(); while (privIter.hasNext()) { @@ -5677,7 +5679,7 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } } - if (persistentObjs.size() > 0) { + if (CollectionUtils.isNotEmpty(persistentObjs)) { if (grantOption) { // If grant option specified, only update the privilege, don't remove it. // Grant option has already been removed from the privileges in the section above @@ -7399,8 +7401,8 @@ private void writeMTableColumnStatistics(Table table, MTableColumnStatistics mSt QueryWrapper queryWrapper = new QueryWrapper(); try { - LOG.info("Updating table level column statistics for db=" + dbName + " tableName=" + tableName - + " colName=" + colName); + LOG.info("Updating table level column statistics for db={} tableName={}" + + " colName={}", dbName, tableName, colName); validateTableCols(table, Lists.newArrayList(colName)); if (oldStats != null) { @@ -7898,9 +7900,8 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName, InvalidObjectException, InvalidInputException { boolean ret = false; Query query = null; - if (dbName == null) { - dbName = Warehouse.DEFAULT_DATABASE_NAME; - } + dbName = org.apache.commons.lang.StringUtils.defaultString(dbName, + Warehouse.DEFAULT_DATABASE_NAME); if (tableName == null) { throw new InvalidInputException("Table name is null."); } @@ -7976,9 +7977,8 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName, Stri throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { boolean ret = false; Query query = null; - if (dbName == null) { - dbName = Warehouse.DEFAULT_DATABASE_NAME; - } + dbName = org.apache.commons.lang.StringUtils.defaultString(dbName, + Warehouse.DEFAULT_DATABASE_NAME); if (tableName == null) { throw new InvalidInputException("Table name is null."); } @@ -8093,7 +8093,7 @@ public boolean addToken(String tokenId, String delegationToken) { rollbackTransaction(); } } - LOG.debug("Done executing addToken with status : " + committed); + LOG.debug("Done executing addToken with status : {}", committed); return committed && (token == null); } @@ -8115,7 +8115,7 @@ public boolean removeToken(String tokenId) { rollbackTransaction(); } } - LOG.debug("Done executing removeToken with status : " + committed); + LOG.debug("Done executing removeToken with status : {}", committed); return committed && (token != null); } @@ -8137,7 +8137,7 @@ public String getToken(String tokenId) { rollbackTransaction(); } } - LOG.debug("Done executing getToken with status : " + committed); + LOG.debug("Done executing getToken with status : {}", committed); return (null == token) ? 
null : token.getTokenStr(); } @@ -8160,7 +8160,7 @@ public String getToken(String tokenId) { } return tokenIdents; } finally { - LOG.debug("Done executing getAllTokenIdentifers with status : " + committed); + LOG.debug("Done executing getAllTokenIdentifers with status : {}", committed); rollbackAndCleanup(committed, query); } } @@ -8179,7 +8179,7 @@ public int addMasterKey(String key) throws MetaException{ rollbackTransaction(); } } - LOG.debug("Done executing addMasterKey with status : " + committed); + LOG.debug("Done executing addMasterKey with status : {}", committed); if (committed) { return ((IntIdentity)pm.getObjectId(masterKey)).getKey(); } else { @@ -8206,7 +8206,7 @@ public void updateMasterKey(Integer id, String key) throws NoSuchObjectException } finally { rollbackAndCleanup(committed, query); } - LOG.debug("Done executing updateMasterKey with status : " + committed); + LOG.debug("Done executing updateMasterKey with status : {}", committed); if (null == masterKey) { throw new NoSuchObjectException("No key found with keyId: " + id); } @@ -8234,7 +8234,7 @@ public boolean removeMasterKey(Integer id) { } finally { rollbackAndCleanup(success, query); } - LOG.debug("Done executing removeMasterKey with status : " + success); + LOG.debug("Done executing removeMasterKey with status : {}", success); return (null != masterKey) && success; } @@ -8257,7 +8257,7 @@ public boolean removeMasterKey(Integer id) { } return masterKeys; } finally { - LOG.debug("Done executing getMasterKeys with status : " + committed); + LOG.debug("Done executing getMasterKeys with status : {}", committed); rollbackAndCleanup(committed, query); } } @@ -8291,18 +8291,17 @@ private synchronized void checkSchema() throws MetaException { if (dbSchemaVer == null) { if (strictValidation) { - throw new MetaException("Version information not found in metastore. "); + throw new MetaException("Version information not found in metastore."); } else { - LOG.warn("Version information not found in metastore. " - + ConfVars.SCHEMA_VERIFICATION.toString() + - " is not enabled so recording the schema version " + + LOG.warn("Version information not found in metastore. {} is not " + + "enabled so recording the schema version {}", ConfVars.SCHEMA_VERIFICATION, hiveSchemaVer); setMetaStoreSchemaVersion(hiveSchemaVer, "Set by MetaStore " + USER + "@" + HOSTNAME); } } else { if (metastoreSchemaInfo.isVersionCompatible(hiveSchemaVer, dbSchemaVer)) { - LOG.debug("Found expected HMS version of " + dbSchemaVer); + LOG.debug("Found expected HMS version of {}", dbSchemaVer); } else { // metastore schema version is different than Hive distribution needs if (strictValidation) { @@ -8310,9 +8309,9 @@ private synchronized void checkSchema() throws MetaException { " does not match metastore's schema version " + dbSchemaVer + " Metastore is not upgraded or corrupt"); } else { - LOG.error("Version information found in metastore differs " + dbSchemaVer + - " from expected schema version " + hiveSchemaVer + - ". Schema verififcation is disabled " + ConfVars.SCHEMA_VERIFICATION); + LOG.error("Version information found in metastore differs {} " + + "from expected schema version {}. 
Schema verification is disabled {}", + dbSchemaVer, hiveSchemaVer, ConfVars.SCHEMA_VERIFICATION); setMetaStoreSchemaVersion(hiveSchemaVer, "Set by MetaStore " + USER + "@" + HOSTNAME); } @@ -8381,10 +8380,10 @@ public void setMetaStoreSchemaVersion(String schemaVersion, String comment) thro MetastoreConf.getBoolVar(getConf(), ConfVars.SCHEMA_VERIFICATION_RECORD_VERSION); if (!recordVersion) { LOG.warn("setMetaStoreSchemaVersion called but recording version is disabled: " + - "version = " + schemaVersion + ", comment = " + comment); + "version = {}, comment = {}", schemaVersion, comment); return; } - LOG.warn("Setting metastore schema version in db to " + schemaVersion); + LOG.warn("Setting metastore schema version in db to {}", schemaVersion); try { mSchemaVer = getMSchemaVersion(); @@ -8418,7 +8417,7 @@ public boolean doesPartitionExist(String dbName, String tableName, List private void debugLog(String message) { if (LOG.isDebugEnabled()) { - LOG.debug(message + getCallStack()); + LOG.debug("{} {}", message, getCallStack()); } } @@ -8472,7 +8471,7 @@ private MFunction convertToMFunction(Function func) throws InvalidObjectExceptio try { mdb = getMDatabase(func.getDbName()); } catch (NoSuchObjectException e) { - LOG.error(StringUtils.stringifyException(e)); + LOG.error("Database does not exist", e); throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist."); } @@ -8734,8 +8733,8 @@ public void run() throws MetaException { break; } catch (Exception e) { LOG.info( - "Attempting to acquire the DB log notification lock: " + currentRetries + " out of " - + maxRetries + " retries", e); + "Attempting to acquire the DB log notification lock: {} out of {}" + + " retries", currentRetries, maxRetries, e); if (currentRetries >= maxRetries) { String message = "Couldn't acquire the DB log notification lock because we reached the maximum" @@ -8774,7 +8773,7 @@ public void addNotificationEvent(NotificationEvent entry) { Collection ids = (Collection) objectQuery.execute(); MNotificationNextId mNotificationNextId = null; boolean needToPersistId; - if (ids == null || ids.size() == 0) { + if (CollectionUtils.isEmpty(ids)) { mNotificationNextId = new MNotificationNextId(1L); needToPersistId = true; } else { @@ -8806,7 +8805,7 @@ public void cleanNotificationEvents(int olderThan) { query = pm.newQuery(MNotificationLog.class, "eventTime < tooOld"); query.declareParameters("java.lang.Integer tooOld"); Collection toBeRemoved = (Collection) query.execute(tooOld); - if (toBeRemoved != null && toBeRemoved.size() > 0) { + if (CollectionUtils.isNotEmpty(toBeRemoved)) { pm.deletePersistentAll(toBeRemoved); } commited = commitTransaction(); @@ -8824,7 +8823,7 @@ public CurrentNotificationEventId getCurrentNotificationEventId() { query = pm.newQuery(MNotificationNextId.class); Collection ids = (Collection) query.execute(); long id = 0; - if (ids != null && ids.size() > 0) { + if (CollectionUtils.isNotEmpty(ids)) { id = ids.iterator().next().getNextEventId() - 1; } commited = commitTransaction(); @@ -8986,7 +8985,7 @@ private static void clearOutPmfClassLoaderCache(PersistenceManagerFactory pmf) { classLoaderResolverMap.set(nc, new HashMap()); LOG.debug("Removed cached classloaders from DataNucleus NucleusContext"); } catch (Exception e) { - LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext ", e); + LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext", e); } } @@ -8997,8 +8996,8 @@ private static void clearClr(ClassLoaderResolver clr) throws 
Exception { long resourcesCleared = clearFieldMap(clri,"resources"); long loadedClassesCleared = clearFieldMap(clri,"loadedClasses"); long unloadedClassesCleared = clearFieldMap(clri, "unloadedClasses"); - LOG.debug("Cleared ClassLoaderResolverImpl: " + - resourcesCleared + "," + loadedClassesCleared + "," + unloadedClassesCleared); + LOG.debug("Cleared ClassLoaderResolverImpl: {}, {}, {}", + resourcesCleared, loadedClassesCleared, unloadedClassesCleared); } } } @@ -9379,7 +9378,7 @@ public void dropConstraint(String dbName, String tableName, List tabConstraints = listAllTableConstraintsWithOptionalConstraintName( dbName, tableName, constraintName); - if (tabConstraints != null && tabConstraints.size() > 0) { + if (CollectionUtils.isNotEmpty(tabConstraints)) { pm.deletePersistentAll(tabConstraints); } else { throw new NoSuchObjectException("The constraint: " + constraintName +
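
The patch applies three idioms throughout ObjectStore: SLF4J parameterized logging in place of string concatenation, CollectionUtils.isEmpty/isNotEmpty in place of explicit null-plus-size checks, and StringUtils.defaultString in place of explicit null defaults. The following is a minimal standalone sketch of those idioms, not part of the patch itself; it assumes slf4j-api, commons-collections, and commons-lang are on the classpath (as they are for the metastore), and the class and variable names below are illustrative only.

import java.util.Collections;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingIdiomsExample {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingIdiomsExample.class);

  public static void main(String[] args) {
    // StringUtils.defaultString(str, def) returns def only when str is null,
    // replacing the explicit "if (dbName == null) dbName = ..." pattern.
    String dbName = StringUtils.defaultString(null, "default");
    List<String> grants = Collections.emptyList();

    // isNotEmpty covers both the null and the size() == 0 case, so
    // "grants != null && grants.size() > 0" collapses to one call.
    if (CollectionUtils.isNotEmpty(grants)) {
      LOG.debug("Deleting {} grants for database {}", grants.size(), dbName);
    }

    // Parameterized logging only formats the message when the level is enabled,
    // so LOG.isDebugEnabled() guards that existed purely to avoid concatenation
    // can be dropped.
    LOG.debug("Dropping database {} along with all tables", dbName);

    // A trailing Throwable with no matching placeholder is logged as a stack
    // trace, the pattern used for LOG.warn("Failed to get database {} ...", name, ex).
    try {
      throw new IllegalStateException("boom");
    } catch (IllegalStateException ex) {
      LOG.warn("Failed to get database {}, returning NoSuchObjectException", dbName, ex);
    }
  }
}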