diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 662de9a..84839fe 100644
--- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -3515,6 +3515,16 @@ private boolean drop_partition_common(RawStore ms, String db_name, String tbl_na
       boolean isExternalTbl = false;
       Map<String, String> transactionalListenerResponses = Collections.emptyMap();
 
+      if (db_name == null) {
+        throw new MetaException("The DB name cannot be null.");
+      }
+      if (tbl_name == null) {
+        throw new MetaException("The table name cannot be null.");
+      }
+      if (part_vals == null) {
+        throw new MetaException("The partition values cannot be null.");
+      }
+
       try {
         ms.openTransaction();
         part = ms.getPartition(db_name, tbl_name, part_vals);
diff --git standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3128089..e2f159d 100644
--- standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -993,6 +993,9 @@ public boolean dropPartition(String db_name, String tbl_name,
   @Override
   public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
       PartitionDropOptions options) throws TException {
+    if (options == null) {
+      options = PartitionDropOptions.instance();
+    }
     return dropPartition(db_name, tbl_name, part_vals, options.deleteData,
         options.purgeData? getEnvironmentContextWithIfPurgeSet() : null);
   }
@@ -1000,6 +1003,13 @@ public boolean dropPartition(String db_name, String tbl_name,
   public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
       boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
       MetaException, TException {
+    if (part_vals != null) {
+      for (String partVal : part_vals) {
+        if (partVal == null) {
+          throw new MetaException("The partition value must not be null.");
+        }
+      }
+    }
     return client.drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData,
         envContext);
   }
diff --git standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
index 4d94ebf..d2ba4be 100644
--- standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
+++ standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestDropPartitions.java
@@ -264,18 +264,13 @@ public void testDropPartitionNonExistingPartVals() throws Exception {
     client.dropPartition(DB_NAME, TABLE_NAME, Lists.newArrayList("2017", "may"), false);
   }
 
-  @Test
+  @Test(expected = MetaException.class)
   public void testDropPartitionNullVal() throws Exception {
 
     List<String> partVals = new ArrayList<>();
     partVals.add(null);
     partVals.add(null);
-    try {
-      client.dropPartition(DB_NAME, TABLE_NAME, partVals, false);
-      Assert.fail("NullPointerException or NoSuchObjectException is expected to be thrown");
-    } catch (NullPointerException | NoSuchObjectException e) {
-      // TODO: Should not throw NPE.
-    }
+    client.dropPartition(DB_NAME, TABLE_NAME, partVals, false);
   }
 
   @Test(expected = NoSuchObjectException.class)
@@ -400,10 +395,13 @@ public void testDropPartitionPurgeSetInTable() throws Exception {
     checkPartitionsAfterDelete(tableName, droppedPartitions, remainingPartitions, true, true);
   }
 
-  @Test(expected = NullPointerException.class)
+  @Test
   public void testDropPartitionNullPartDropOptions() throws Exception {
 
-    // TODO: This should not throw NPE
+    client.dropPartition(DB_NAME, TABLE_NAME, PARTITIONS[0].getValues(), null);
+    List<Partition> droppedPartitions = Lists.newArrayList(PARTITIONS[0]);
+    List<Partition> remainingPartitions = Lists.newArrayList(PARTITIONS[1], PARTITIONS[2]);
+    checkPartitionsAfterDelete(TABLE_NAME, droppedPartitions, remainingPartitions, true, false);
   }
 
   // Tests for dropPartition(String db_name, String tbl_name, String name,
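
For reference, a minimal, hypothetical sketch of how a caller sees the behaviour this patch introduces: a null partition value is rejected with a MetaException on the client side, and a null PartitionDropOptions falls back to PartitionDropOptions.instance(). It assumes an already configured IMetaStoreClient plus database and table names analogous to the DB_NAME/TABLE_NAME fixtures in the tests above; the class and method names below are illustrative and not part of the patch.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.PartitionDropOptions;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class DropPartitionNullHandlingSketch {

  // 'client', 'dbName' and 'tableName' are assumed to come from a setup
  // similar to the one in TestDropPartitions.
  static void demo(IMetaStoreClient client, String dbName, String tableName) throws Exception {
    // Case 1: a null partition value now fails fast with
    // MetaException("The partition value must not be null.") instead of an NPE.
    List<String> partVals = Arrays.asList("2017", null);
    try {
      client.dropPartition(dbName, tableName, partVals, false);
    } catch (MetaException expected) {
      // Rejected by the client-side validation added in HiveMetaStoreClient.
    }

    // Case 2: null drop options are replaced with the defaults
    // (PartitionDropOptions.instance()), so the call no longer fails with an NPE;
    // with an existing partition (as in the test above) the drop simply succeeds.
    client.dropPartition(dbName, tableName, Arrays.asList("2017", "march"),
        (PartitionDropOptions) null);
  }
}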