diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 765fa119ea..ed6446a3ec 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -1894,13 +1894,16 @@ public void testDropPartitionEventWithPartitionOnTimestampColumn() throws IOExce
     String testName = "dropPartitionEventWithPartitionOnTimestampColumn";
     String dbName = createDB(testName, driver);
     run("CREATE TABLE " + dbName + ".ptned(a string) PARTITIONED BY (b timestamp)", driver);
+    String[] ptn_data = new String[] { "fourteen" };
+    String ptnVal = "2017-10-01 01:00:10.1";
+    run("INSERT INTO TABLE " + dbName + ".ptned PARTITION(b=\"" + ptnVal +"\") values('" + ptn_data[0] + "')", driver);
 
     // Bootstrap dump/load
     String replDbName = dbName + "_dupe";
     Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
 
-    String[] ptn_data = new String[] { "fifteen" };
-    String ptnVal = "2017-10-24 00:00:00.0";
+    ptn_data = new String[] { "fifteen" };
+    ptnVal = "2017-10-24 00:00:00.0";
     run("INSERT INTO TABLE " + dbName + ".ptned PARTITION(b=\"" + ptnVal +"\") values('" + ptn_data[0] + "')", driver);
 
     // Replicate insert event and verify
@@ -1914,6 +1917,35 @@ public void testDropPartitionEventWithPartitionOnTimestampColumn() throws IOExce
     verifyIfPartitionNotExist(replDbName, "ptned", new ArrayList<>(Arrays.asList(ptnVal)), metaStoreClientMirror);
   }
 
+  /**
+   * Verify replication when string partition column value has special chars
+   * @throws IOException
+   */
+  @Test
+  public void testWithStringPartitionSpecialChars() throws IOException {
+    String testName = "testWithStringPartitionSpecialChars";
+    String dbName = createDB(testName, driver);
+    run("CREATE TABLE " + dbName + ".ptned(v string) PARTITIONED BY (p string)", driver);
+    String[] ptn_data = new String[] { "fourteen", "fifteen" };
+    String[] ptnVal = new String [] {"has a space, /, and \t tab", "another set of '#@ chars" };
+    run("INSERT INTO TABLE " + dbName + ".ptned PARTITION(p=\"" + ptnVal[0] +"\") values('" + ptn_data[0] + "')", driver);
+
+    // Bootstrap dump/load
+    String replDbName = dbName + "_dupe";
+    Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
+
+    run("INSERT INTO TABLE " + dbName + ".ptned PARTITION(p=\"" + ptnVal[1] +"\") values('" + ptn_data[1] + "')", driver);
+    // Replicate insert event and verify
+    Tuple incrDump = incrementalLoadAndVerify(dbName, bootstrapDump.lastReplId, replDbName);
+    verifyRun("SELECT p from " + replDbName + ".ptned ORDER BY p desc", ptnVal, driverMirror);
+
+    run("ALTER TABLE " + dbName + ".ptned DROP PARTITION(p=\"" + ptnVal[0] + "\")", driver);
+
+    // Replicate drop partition event and verify
+    incrementalLoadAndVerify(dbName, incrDump.lastReplId, replDbName);
+    verifyIfPartitionNotExist(replDbName, "ptned", new ArrayList<>(Arrays.asList(ptnVal[0])), metaStoreClientMirror);
+  }
+
   @Test
   public void testRenameTableWithCM() throws IOException {
     String testName = "renameTableWithCM";
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
index 2f636b626c..4642012648 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
@@ -108,6 +108,6 @@ private BufferedWriter writer() throws IOException {
   private String encodedUri(FileStatus fileStatus) throws IOException {
     Path currentDataFilePath = fileStatus.getPath();
     String checkSum = ReplChangeManager.checksumFor(currentDataFilePath, dataFileSystem);
-    return ReplChangeManager.encodeFileUri(currentDataFilePath.toUri().toString(), checkSum);
+    return ReplChangeManager.encodeFileUri(currentDataFilePath.toString(), checkSum);
   }
 }
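The functional change is the last hunk: FileOperations.encodedUri() now passes Path.toString() rather than Path.toUri().toString() to ReplChangeManager.encodeFileUri(). The standalone sketch below is illustrative only and not part of the patch; the class name and the path value are made up. It shows why the two forms differ for the special-character partition values exercised by testWithStringPartitionSpecialChars: Hadoop's Path.toUri() percent-encodes characters such as spaces, while Path.toString() keeps the raw partition value.

import org.apache.hadoop.fs.Path;

// Illustrative sketch (hypothetical class, hypothetical path) showing the
// difference between the two string forms of a Hadoop Path when the
// partition value contains a space.
public class PathEncodingSketch {
  public static void main(String[] args) {
    Path dataFile = new Path("/warehouse/repl.db/ptned/p=has a space/000000_0");

    // Plain form keeps the raw partition value:
    //   /warehouse/repl.db/ptned/p=has a space/000000_0
    System.out.println(dataFile.toString());

    // URI form percent-encodes the space:
    //   /warehouse/repl.db/ptned/p=has%20a%20space/000000_0
    System.out.println(dataFile.toUri().toString());
  }
}

Feeding the un-encoded form to encodeFileUri() keeps the dumped file URI consistent with the on-disk partition path, which is what the new test verifies end to end through bootstrap, insert, and drop-partition replication.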