diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 00620528ae..c9d978626e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -49,7 +49,6 @@
 import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;
 import org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
@@ -79,7 +78,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Map;
 
 import static junit.framework.Assert.assertFalse;
 import static junit.framework.Assert.assertTrue;
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 2a48527a31..cb2cb4089d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -234,11 +234,11 @@ public void testMultipleStagesOfReplicationLoadTask() throws Throwable {
     WarehouseInstance.Tuple tuple = primary
         .run("use " + primaryDbName)
         .run("create table t1 (id int)")
-        .run("create table t2 (place string) partitioned by (country string)")
+        .run("create table T2 (place string) partitioned by (country string)")
         .run("insert into table t2 partition(country='india') values ('bangalore')")
         .run("insert into table t2 partition(country='us') values ('austin')")
         .run("insert into table t2 partition(country='france') values ('paris')")
-        .run("create table t3 (rank int)")
+        .run("create table T3 (rank int)")
         .dump(primaryDbName, null);
 
     // each table creation itself takes more than one task; given we allow a max of 1 task at a time, we should hit multiple runs.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandler.java
index 8fdf2f16a2..264ee1145f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandler.java
@@ -18,9 +18,15 @@
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class AddForeignKeyHandler extends AbstractConstraintEventHandler {
   AddForeignKeyHandler(NotificationEvent event) {
     super(event);
@@ -32,7 +38,19 @@ public void handle(Context withinContext) throws Exception {
         event.getMessage());
     if (shouldReplicate(withinContext)) {
       DumpMetaData dmd = withinContext.createDmd(this);
-      dmd.setPayload(event.getMessage());
+
+      AddForeignKeyMessage message = deserializer.getAddForeignKeyMessage(event.getMessage());
+      List<SQLForeignKey> foreignKeys = message.getForeignKeys();
+      ArrayList<SQLForeignKey> result = new ArrayList<>();
+      for (SQLForeignKey fk : foreignKeys) {
+        fk.setFktable_db(fk.getFktable_db().toLowerCase());
+        fk.setFktable_name(fk.getFktable_name().toLowerCase());
+        fk.setPktable_db(fk.getPktable_db().toLowerCase());
+        fk.setPktable_name(fk.getPktable_name().toLowerCase());
+        result.add(fk);
+      }
+
+      dmd.setPayload(MessageFactory.getInstance().buildAddForeignKeyMessage(result).toString());
       dmd.write();
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandler.java
index 335d4e6af9..7f9ea352e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandler.java
@@ -18,9 +18,15 @@
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
+import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class AddNotNullConstraintHandler extends AbstractConstraintEventHandler {
   AddNotNullConstraintHandler(NotificationEvent event) {
     super(event);
@@ -30,10 +36,20 @@
   public void handle(Context withinContext) throws Exception {
     LOG.debug("Processing#{} ADD_NOTNULLCONSTRAINT_MESSAGE message : {}", fromEventId(),
         event.getMessage());
-
     if (shouldReplicate(withinContext)) {
       DumpMetaData dmd = withinContext.createDmd(this);
-      dmd.setPayload(event.getMessage());
+
+      AddNotNullConstraintMessage message =
+          deserializer.getAddNotNullConstraintMessage(event.getMessage());
+      List<SQLNotNullConstraint> notNullConstraints = message.getNotNullConstraints();
+      ArrayList<SQLNotNullConstraint> result = new ArrayList<>();
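+      // Constraint payloads are rewritten rather than copied verbatim: the db/table names
+      // carried by each constraint are lower-cased so they match the metastore's
+      // canonical, lower-case identifiers on the replica.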
+      for (SQLNotNullConstraint constraint : notNullConstraints) {
+        constraint.setTable_db(constraint.getTable_db().toLowerCase());
+        constraint.setTable_name(constraint.getTable_name().toLowerCase());
+        result.add(constraint);
+      }
+      dmd.setPayload(
+          MessageFactory.getInstance().buildAddNotNullConstraintMessage(result).toString());
       dmd.write();
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandler.java
index cf45c684a7..f2acd8de6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandler.java
@@ -18,9 +18,15 @@
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class AddPrimaryKeyHandler extends AbstractConstraintEventHandler {
   AddPrimaryKeyHandler(NotificationEvent event) {
     super(event);
@@ -33,7 +39,16 @@ public void handle(Context withinContext) throws Exception {
 
     if (shouldReplicate(withinContext)) {
       DumpMetaData dmd = withinContext.createDmd(this);
-      dmd.setPayload(event.getMessage());
+      AddPrimaryKeyMessage message = deserializer.getAddPrimaryKeyMessage(event.getMessage());
+      List<SQLPrimaryKey> primaryKeys = message.getPrimaryKeys();
+      List<SQLPrimaryKey> results = new ArrayList<>();
+      for (SQLPrimaryKey pk : primaryKeys) {
+        pk.setTable_db(pk.getTable_db().toLowerCase());
+        pk.setTable_name(pk.getTable_name().toLowerCase());
+        results.add(pk);
+      }
+
+      dmd.setPayload(MessageFactory.getInstance().buildAddPrimaryKeyMessage(results).toString());
       dmd.write();
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandler.java
index 58835a0352..551dfdb85f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandler.java
@@ -18,9 +18,15 @@
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class AddUniqueConstraintHandler extends AbstractConstraintEventHandler {
   AddUniqueConstraintHandler(NotificationEvent event) {
     super(event);
@@ -33,7 +39,17 @@ public void handle(Context withinContext) throws Exception {
 
     if (shouldReplicate(withinContext)) {
       DumpMetaData dmd = withinContext.createDmd(this);
-      dmd.setPayload(event.getMessage());
+      AddUniqueConstraintMessage message =
+          deserializer.getAddUniqueConstraintMessage(event.getMessage());
+      List<SQLUniqueConstraint> uniqueConstraints = message.getUniqueConstraints();
+      List<SQLUniqueConstraint> results = new ArrayList<>();
+      for (SQLUniqueConstraint constraint : uniqueConstraints) {
+        constraint.setTable_db(constraint.getTable_db().toLowerCase());
+        constraint.setTable_name(constraint.getTable_name().toLowerCase());
+        results.add(constraint);
+      }
+      dmd.setPayload(
+          MessageFactory.getInstance().buildAddUniqueConstraintMessage(results).toString());
       dmd.write();
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
index 3863c59831..a62391a58d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
@@ -31,7 +34,15 @@ public void handle(Context withinContext) throws Exception {
     LOG.info("Processing#{} ALTER_DATABASE message : {}", fromEventId(), event.getMessage());
 
     DumpMetaData dmd = withinContext.createDmd(this);
-    dmd.setPayload(event.getMessage());
+
+    AlterDatabaseMessage alterDatabaseMessage =
+        deserializer.getAlterDatabaseMessage(event.getMessage());
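+    // An ALTER DATABASE event carries the database name on both its before and after
+    // images, so each side is normalized before the message is rebuilt.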
+    Database dbObjBefore = alterDatabaseMessage.getDbObjBefore();
+    dbObjBefore.setName(dbObjBefore.getName().toLowerCase());
+    Database dbObjAfter = alterDatabaseMessage.getDbObjAfter();
+    dbObjAfter.setName(dbObjAfter.getName().toLowerCase());
+    dmd.setPayload(
+        MessageFactory.getInstance().buildAlterDatabaseMessage(dbObjBefore, dbObjAfter).toString());
     dmd.write();
 
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandler.java
index 6b709a6d52..b0aef216f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandler.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hive.ql.parse.repl.dump.events;
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
@@ -28,9 +30,15 @@
   @Override
   public void handle(Context withinContext) throws Exception {
-    LOG.info("Processing#{} DROP_CONSTRAINT_MESSAGE message : {}", fromEventId(), event.getMessage());
+    LOG.info("Processing#{} DROP_CONSTRAINT_MESSAGE message : {}", fromEventId(),
+        event.getMessage());
 
     DumpMetaData dmd = withinContext.createDmd(this);
-    dmd.setPayload(event.getMessage());
+    DropConstraintMessage message = deserializer.getDropConstraintMessage(event.getMessage());
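+    // Only the db and table names are lower-cased; the constraint name itself is passed
+    // through unchanged.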
+    String payload = MessageFactory.getInstance()
+        .buildDropConstraintMessage(message.getDB().toLowerCase(), message.getTable().toLowerCase(),
+            message.getConstraint()).toString();
+
+    dmd.setPayload(payload);
     dmd.write();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandler.java
index cce0192d43..3e6428e218 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandler.java
@@ -19,6 +19,9 @@
 
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.messaging.DropTableMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
@@ -33,7 +36,13 @@ public void handle(Context withinContext) throws Exception {
     LOG.info("Processing#{} DROP_TABLE message : {}", fromEventId(), event.getMessage());
 
     DumpMetaData dmd = withinContext.createDmd(this);
-    dmd.setPayload(event.getMessage());
+
+    DropTableMessage dropTableMessage = deserializer.getDropTableMessage(event.getMessage());
+    Table tableObj = dropTableMessage.getTableObj();
+    tableObj.setDbName(tableObj.getDbName().toLowerCase());
+    tableObj.setTableName(tableObj.getTableName().toLowerCase());
+    dmd.setPayload(MessageFactory.getInstance().buildDropTableMessage(tableObj).toString());
+
     dmd.write();
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java
index 15b7e138ed..75285c61cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializer.java
@@ -38,6 +38,7 @@ public DBSerializer(Database dbObject) {
   @Override
   public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider)
       throws SemanticException, IOException {
+    dbObject.setName(dbObject.getName().toLowerCase());
     dbObject.putToParameters(
         ReplicationSpec.KEY.CURR_STATE_ID.toString(),
         additionalPropertiesProvider.getCurrentReplicationState()
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
index f72f430a09..33f75b34e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
@@ -63,6 +63,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
       }
     }
     Function copyObj = new Function(this.function);
+    copyObj.setFunctionName(copyObj.getFunctionName().toLowerCase());
+    copyObj.setDbName(copyObj.getDbName().toLowerCase());
     if (!resourceUris.isEmpty()) {
       assert resourceUris.size() == this.function.getResourceUris().size();
       copyObj.setResourceUris(resourceUris);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java
index ce83523389..998c49f7ca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializer.java
@@ -55,6 +55,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
           partition.putToParameters("EXTERNAL", "FALSE");
         }
       }
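+      // Keep the dumped partition consistent with the lower-cased table object written
+      // by TableSerializer.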
+      partition.setDbName(partition.getDbName().toLowerCase());
+      partition.setTableName(partition.getTableName().toLowerCase());
       writer.jsonGenerator.writeString(serializer.toString(partition, UTF_8));
       writer.jsonGenerator.flush();
     } catch (TException e) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java
index 143808bb85..76939e83b3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java
@@ -52,12 +52,14 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
       return;
     }
 
-    Table tTable = tableHandle.getTTable();
-    tTable = addPropertiesToTable(tTable, additionalPropertiesProvider);
+    Table table = tableHandle.getTTable();
+    table = addPropertiesToTable(table, additionalPropertiesProvider);
     try {
       TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
+      table.setDbName(table.getDbName().toLowerCase());
+      table.setTableName(table.getTableName().toLowerCase());
       writer.jsonGenerator
-          .writeStringField(FIELD_NAME, serializer.toString(tTable, UTF_8));
+          .writeStringField(FIELD_NAME, serializer.toString(table, UTF_8));
       writer.jsonGenerator.writeFieldName(PartitionSerializer.FIELD_NAME);
       writePartitions(writer, additionalPropertiesProvider);
     } catch (TException e) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandlerTest.java
new file mode 100644
index 0000000000..76af1246ff
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddForeignKeyHandlerTest.java
@@ -0,0 +1,65 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class AddForeignKeyHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    SQLForeignKey sqlForeignKey = new SQLForeignKey(
+        "PKDB", "pkTable", "pkcol",
+        "FKdb", "FKtABLE", "fk_col",
+        Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE,
+        "fkname", "pkname",
+        false, false, false);
+
+    AddForeignKeyMessage addForeignKeyMessage = MessageFactory.getInstance()
+        .buildAddForeignKeyMessage(Collections.singletonList(sqlForeignKey));
+
+    AddForeignKeyHandler handler =
+        new AddForeignKeyHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.ADD_FOREIGNKEY.toString(), addForeignKeyMessage.toString())) {
+          @Override
+          boolean shouldReplicate(Context withinContext) {
+            return true;
+          }
+        };
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
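+    // Route the handler's output into a mock DumpMetaData so the serialized payload can be
+    // captured and inspected below.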
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain pk dbName [pkdb] in lower case", value.contains("pkdb"));
+    assertTrue(value + " does not contain pk tableName [pktable] in lower case",
+        value.contains("pktable"));
+    assertTrue(value + " does not contain fk dbName [fkdb] in lower case", value.contains("fkdb"));
+    assertTrue(value + " does not contain fk tableName [fktable] in lower case",
+        value.contains("fktable"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandlerTest.java
new file mode 100644
index 0000000000..dc8e70e1b5
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddNotNullConstraintHandlerTest.java
@@ -0,0 +1,60 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
+import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class AddNotNullConstraintHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    SQLNotNullConstraint constraint = new SQLNotNullConstraint(
+        "A_DB", "a_Table", "pkcol", "name",
+        false, false, false);
+
+    AddNotNullConstraintMessage constraintMessage = MessageFactory.getInstance()
+        .buildAddNotNullConstraintMessage(Collections.singletonList(constraint));
+
+    AddNotNullConstraintHandler handler =
+        new AddNotNullConstraintHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.ADD_NOTNULLCONSTRAINT.toString(),
+            constraintMessage.toString())) {
+          @Override
+          boolean shouldReplicate(Context withinContext) {
+            return true;
+          }
+        };
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [a_db] in lower case", value.contains("a_db"));
+    assertTrue(value + " does not contain tableName [a_table] in lower case",
+        value.contains("a_table"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandlerTest.java
new file mode 100644
index 0000000000..b67bc744ab
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPrimaryKeyHandlerTest.java
@@ -0,0 +1,60 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class AddPrimaryKeyHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    SQLPrimaryKey sqlPrimaryKey =
+        new SQLPrimaryKey("TABLE_NAME", "DB_NAME", "col1", Integer.MAX_VALUE, "pk", true, true,
+            true);
+
+    AddPrimaryKeyMessage message = MessageFactory.getInstance()
+        .buildAddPrimaryKeyMessage(Collections.singletonList(sqlPrimaryKey));
+
+    AddPrimaryKeyHandler handler =
+        new AddPrimaryKeyHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.ADD_PRIMARYKEY.toString(), message.toString())) {
+          @Override
+          boolean shouldReplicate(Context withinContext) {
+            return true;
+          }
+        };
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [db_name] in lower case",
+        value.contains("db_name"));
+    assertTrue(value + " does not contain tableName [table_name] in lower case",
+        value.contains("table_name"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandlerTest.java
new file mode 100644
index 0000000000..073f89c76d
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddUniqueConstraintHandlerTest.java
@@ -0,0 +1,61 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class AddUniqueConstraintHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    SQLUniqueConstraint constraint =
+        new SQLUniqueConstraint("TABLE_NAME", "DB_NAME", "col1", Integer.MAX_VALUE, "pk", true,
+            true, true);
+
+    AddUniqueConstraintMessage message = MessageFactory.getInstance()
+        .buildAddUniqueConstraintMessage(Collections.singletonList(constraint));
+
+    AddUniqueConstraintHandler handler =
+        new AddUniqueConstraintHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.ADD_UNIQUECONSTRAINT.toString(), message.toString())) {
+          @Override
+          boolean shouldReplicate(Context withinContext) {
+            return true;
+          }
+        };
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [db_name] in lower case",
+        value.contains("db_name"));
+    assertTrue(value + " does not contain tableName [table_name] in lower case",
+        value.contains("table_name"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandlerTest.java
new file mode 100644
index 0000000000..3ad95ee721
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandlerTest.java
@@ -0,0 +1,51 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.HashMap;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class AlterDatabaseHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void databaseNameIsLowerCase() throws Exception {
+    AlterDatabaseMessage alterDatabaseMessage =
+        MessageFactory.getInstance().buildAlterDatabaseMessage(
+            new Database("BEFORE", "", "", new HashMap<>()),
+            new Database("AFTER", "", "", new HashMap<>()));
+
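+    // No shouldReplicate override here: unlike the constraint handlers,
+    // AlterDatabaseHandler dumps the event unconditionally.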
+    AlterDatabaseHandler handler =
+        new AlterDatabaseHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.ALTER_DATABASE.toString(), alterDatabaseMessage.toString()));
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [before] in lower case", value.contains("before"));
+    assertTrue(value + " does not contain dbName [after] in lower case", value.contains("after"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandlerTest.java
new file mode 100644
index 0000000000..03187d85ad
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropConstraintHandlerTest.java
@@ -0,0 +1,49 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class DropConstraintHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    DropConstraintMessage message = MessageFactory.getInstance()
+        .buildDropConstraintMessage("DB_NAME", "TAb_name", "cons_name");
+
+    DropConstraintHandler handler =
+        new DropConstraintHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.DROP_CONSTRAINT.toString(), message.toString()));
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [db_name] in lower case",
+        value.contains("db_name"));
+    assertTrue(value + " does not contain tableName [tab_name] in lower case",
+        value.contains("tab_name"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandlerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandlerTest.java
new file mode 100644
index 0000000000..79c2301d8c
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/events/DropTableHandlerTest.java
@@ -0,0 +1,55 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.messaging.DropTableMessage;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
+import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class DropTableHandlerTest {
+
+  @Mock
+  private EventHandler.Context context;
+
+  @Test
+  public void tableNameAndDatabaseNameIsLowerCase() throws Exception {
+    DropTableMessage dropTableMessage =
+        MessageFactory.getInstance().buildDropTableMessage(
+            new Table("SomeTable", "InADB", "", Integer.MAX_VALUE, Integer.MAX_VALUE,
+                Integer.MAX_VALUE, new StorageDescriptor(), new ArrayList<>(), new HashMap<>(), "",
+                "", ""));
+
+    DropTableHandler handler =
+        new DropTableHandler(new NotificationEvent(Long.MAX_VALUE, Integer.MAX_VALUE,
+            EventMessage.EventType.DROP_TABLE.toString(), dropTableMessage.toString()));
+
+    DumpMetaData mockDmd = mock(DumpMetaData.class);
+    when(context.createDmd(anyObject())).thenReturn(mockDmd);
+
+    handler.handle(context);
+
+    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
+    verify(mockDmd).setPayload(captor.capture());
+    String value = captor.getValue();
+    assertTrue(value + " does not contain dbName [inadb] in lower case", value.contains("inadb"));
+    assertTrue(value + " does not contain tableName [sometable] in lower case",
+        value.contains("sometable"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializerTest.java
new file mode 100644
index 0000000000..c4080240a6
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/DBSerializerTest.java
@@ -0,0 +1,46 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.io;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.same;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class DBSerializerTest {
+  @Mock
+  private FileSystem fs;
+  @Mock
+  private Path writePath;
+
+  @Test
+  public void databaseNameIsInLowercase() throws IOException, SemanticException {
+    DBSerializer dbSerializer =
+        new DBSerializer(new Database("DBName", "", "", new HashMap<>()));
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    FSDataOutputStream stream = new FSDataOutputStream(out, null);
+    when(fs.create(same(writePath))).thenReturn(stream);
+
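+    // JsonWriter writes through the mocked FileSystem into the in-memory stream, so the
+    // serialized JSON can be asserted on as a plain string.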
+    try (JsonWriter writer = new JsonWriter(fs, writePath)) {
+      ReplicationSpec additionalPropertiesProvider = new ReplicationSpec();
+      additionalPropertiesProvider.setCurrentReplicationState("34");
+      dbSerializer.writeTo(writer, additionalPropertiesProvider);
+    }
+    String outputString = out.toString();
+    assertTrue(outputString + " does not contain the database name in lowercase",
+        outputString.contains("dbname"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializerTest.java
new file mode 100644
index 0000000000..ef74c0cb8e
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializerTest.java
@@ -0,0 +1,54 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.io;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.same;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class FunctionSerializerTest {
+  @Mock
+  private FileSystem fs;
+  @Mock
+  private Path writePath;
+
+  @Test
+  public void databaseNameAndFunctionNameAreInLowerCase() throws IOException,
+      SemanticException {
+    Function function = new Function("TESTFUNCTION", "DBNAME",
+        "org.apache.some.class.Clazz", "test",
+        PrincipalType.USER, Integer.MAX_VALUE, FunctionType.JAVA, Collections.emptyList());
+    FunctionSerializer functionSerializer = new FunctionSerializer(function, new HiveConf());
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    FSDataOutputStream stream = new FSDataOutputStream(out, null);
+    when(fs.create(same(writePath))).thenReturn(stream);
+
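+    // FunctionSerializer lower-cases a copy of the Function rather than the input object,
+    // so the normalization is only observable in the serialized output.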
+    try (JsonWriter writer = new JsonWriter(fs, writePath)) {
+      functionSerializer.writeTo(writer, new ReplicationSpec());
+    }
+
+    String outputString = out.toString();
+    assertTrue(outputString + " does not contain the function name in lowercase",
+        outputString.contains("testfunction"));
+    assertTrue(outputString + " does not contain the database name in lowercase",
+        outputString.contains("dbname"));
+  }
+}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializerTest.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializerTest.java
new file mode 100644
index 0000000000..453247d060
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/repl/dump/io/PartitionSerializerTest.java
@@ -0,0 +1,51 @@
+package org.apache.hadoop.hive.ql.parse.repl.dump.io;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.same;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class PartitionSerializerTest {
+  @Mock
+  private FileSystem fs;
+  @Mock
+  private Path writePath;
+
+  @Test
+  public void tableNameAndDatabaseNameAreInLowerCase() throws IOException, SemanticException {
+    Partition partition = new Partition(Collections.emptyList(), "DBName", "TABLENAME",
+        Integer.MAX_VALUE, Integer.MAX_VALUE, new StorageDescriptor(), Collections.emptyMap());
+    PartitionSerializer partitionSerializer = new PartitionSerializer(partition);
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    FSDataOutputStream stream = new FSDataOutputStream(out, null);
+    when(fs.create(same(writePath))).thenReturn(stream);
+
+    try (JsonWriter writer = new JsonWriter(fs, writePath)) {
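+      // PartitionSerializer writes a bare JSON string value, so a field name must be
+      // opened on the generator first to keep the output well-formed.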
outputString.contains("dbname")); + } +} \ No newline at end of file