diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java index bfb25aa..ce8fe60 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestDDLWithRemoteMetastoreSecondNamenode.java @@ -52,6 +52,7 @@ private static final String Table4Name = "table4_nondefault_nn"; private static final String Table5Name = "table5_nondefault_nn"; private static final String Table6Name = "table6_nondefault_nn"; + private static final String Table7Name = "table7_nondefault_nn"; private static final String Index1Name = "index1_table1_nondefault_nn"; private static final String Index2Name = "index2_table1_nondefault_nn"; private static final String tmpdir = System.getProperty("test.tmp.dir"); @@ -197,6 +198,27 @@ else if (new Path(location).toUri().getScheme()!= null) { } } + private void alterPartitionAndCheck(Table table, String column, + String value, String location) throws CommandNeedRetryException, HiveException { + assertNotNull(location); + executeQuery("ALTER TABLE " + table.getTableName() + + " PARTITION (" + column + "='" + value + "')" + + " SET LOCATION '" + location + "'"); + HashMap<String, String> partitions = new HashMap<String, String>(); + partitions.put(column, value); + Partition partition = db.getPartition(table, partitions, false); + assertNotNull("Partition object is expected for " + table.getTableName(), partition); + String locationActual = partition.getLocation(); + if (new Path(location).toUri().getScheme() != null) { + assertEquals("Partition should be located in the first filesystem", + fs.makeQualified(new Path(location)).toString(), locationActual); + } + else { + assertEquals("Partition should be located in the second filesystem", + fs2.makeQualified(new Path(location)).toString(), locationActual); + } + } + 
private Table createTableAndCheck(String tableName, String tableLocation) throws CommandNeedRetryException, HiveException, URISyntaxException { return createTableAndCheck(null, tableName, tableLocation); @@ -294,6 +316,15 @@ public void testCreateTableWithIndexAndPartitionsNonDefaultNameNode() throws Exc createTableAndCheck(table1, Table6Name, null); } + public void testAlterPartitionSetLocationNonDefaultNameNode() throws Exception { + assertTrue("Test suite should have been initialized", isInitialized); + String tableLocation = tmppathFs2 + "/" + "test_set_part_loc"; + Table table = createTableAndCheck(Table7Name, tableLocation); + + addPartitionAndCheck(table, "p", "p1", "/tmp/test/1"); + alterPartitionAndCheck(table, "p", "p1", "/tmp/test/2"); + } + public void testCreateDatabaseWithTableNonDefaultNameNode() throws Exception { assertTrue("Test suite should be initialied", isInitialized ); final String tableLocation = tmppathFs2 + "/" + Table3Name; diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 45c77a2..847e0b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -690,6 +690,11 @@ public void alterPartition(String dbName, String tblName, Partition newPart, Env throws InvalidOperationException, HiveException { try { validatePartition(newPart); + String location = newPart.getLocation(); + if (location != null && !Utilities.isDefaultNameNode(conf)) { + location = Utilities.getQualifiedPath(conf, new Path(location)); + newPart.setLocation(location); + } getMSC().alter_partition(dbName, tblName, newPart.getTPartition(), environmentContext); } catch (MetaException e) { @@ -729,6 +734,11 @@ public void alterPartitions(String tblName, List newParts, Environmen if (tmpPart.getParameters() != null) { tmpPart.getParameters().remove(hive_metastoreConstants.DDL_TIME); } + String location = tmpPart.getLocation(); + 
if (location != null && !Utilities.isDefaultNameNode(conf)) { + location = Utilities.getQualifiedPath(conf, new Path(location)); + tmpPart.setLocation(location); + } newTParts.add(tmpPart.getTPartition()); } getMSC().alter_partitions(names[0], names[1], newTParts, environmentContext);