diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 84398c6..fb90f5a 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -896,7 +896,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal "Default property values for newly created tables"), DDL_CTL_PARAMETERS_WHITELIST("hive.ddl.createtablelike.properties.whitelist", "", "Table Properties to copy over when executing a Create Table Like."), - METASTORE_RAW_STORE_IMPL("hive.metastore.rawstore.impl", "org.apache.hadoop.hive.metastore.ObjectStore", + METASTORE_RAW_STORE_IMPL("hive.metastore.rawstore.impl", "org.apache.hadoop.hive.metastore.cache.CachedStore", "Name of the class that implements org.apache.hadoop.hive.metastore.rawstore interface. \n" + "This class is used to store and retrieval of raw metadata objects such as table, database"), METASTORE_CACHED_RAW_STORE_IMPL("hive.metastore.cached.rawstore.impl", "org.apache.hadoop.hive.metastore.ObjectStore", diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index b897ffa..d39d7e2 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -350,7 +350,9 @@ public void initConf() throws Exception { if (!useHBaseMetastore) { // Plug verifying metastore in for testing DirectSQL. 
conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, - "org.apache.hadoop.hive.metastore.VerifyingObjectStore"); + "org.apache.hadoop.hive.metastore.cache.CachedStore"); + conf.setVar(HiveConf.ConfVars.METASTORE_CACHED_RAW_STORE_IMPL, + "org.apache.hadoop.hive.metastore.VerifyingObjectStore"); } else { conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL, HBaseStore.class.getName()); conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java index 39b1676..8b8aaa1 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java @@ -514,8 +514,8 @@ public boolean addPartitions(String dbName, String tblName, if (succ) { interruptCacheUpdateMaster(); for (Partition part : parts) { - SharedCache.addPartitionToCache(HiveStringUtils.normalizeIdentifier(dbName), - HiveStringUtils.normalizeIdentifier(tblName), part); + SharedCache.addPartitionToCache(HiveStringUtils.normalizeIdentifier(part.getDbName()), + HiveStringUtils.normalizeIdentifier(part.getTableName()), part); } } return succ; @@ -545,6 +545,8 @@ public Partition getPartition(String dbName, String tableName, HiveStringUtils.normalizeIdentifier(tableName), part_vals); if (part != null) { part.unsetPrivileges(); + } else { + throw new NoSuchObjectException(); } return part; } @@ -782,6 +784,7 @@ public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr, for (String partName : partNames) { Partition part = SharedCache.getPartitionFromCache(HiveStringUtils.normalizeIdentifier(dbName), HiveStringUtils.normalizeIdentifier(tblName), partNameToVals(partName)); + part.unsetPrivileges(); result.add(part); } return hasUnknownPartitions; @@ -1046,7 +1049,7 @@ public Partition getPartitionWithAuth(String dbName, String tblName, } } if (!psMatch) { - 
break; + continue; } if (maxParts == -1 || count < maxParts) { partNames.add(Warehouse.makePartName(t.getPartitionKeys(), part.getValues())); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index 3172f92..7dfa38f 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -653,7 +653,7 @@ private static ResourceType convertResourceTypes( } } if (sd.getBucketCols() != null) { - SortedSet<String> bucketCols = new TreeSet<>(sd.getBucketCols()); + List<String> bucketCols = new ArrayList<>(sd.getBucketCols()); for (String bucket : bucketCols) md.update(bucket.getBytes(ENCODING)); } if (sd.getSortCols() != null) { @@ -688,6 +688,7 @@ private static ResourceType convertResourceTypes( md.update(e.getValue().getBytes(ENCODING)); } } + md.update(sd.isStoredAsSubDirectories() ? "true".getBytes(ENCODING) : "false".getBytes(ENCODING)); } return md.digest();