From 6b23a00adfd90f5d4339d224a5136f8813e04cae Mon Sep 17 00:00:00 2001 From: Alexander Pakulov Date: Thu, 18 Jun 2015 11:01:14 -0700 Subject: [PATCH] KAFKA-1782; Junit3 Misusage --- .../integration/kafka/api/ConsumerBounceTest.scala | 2 ++ .../scala/integration/kafka/api/ConsumerTest.scala | 3 +- .../kafka/api/IntegrationTestHarness.scala | 5 ++- .../integration/kafka/api/ProducerBounceTest.scala | 4 ++- .../kafka/api/ProducerCompressionTest.scala | 3 +- .../kafka/api/ProducerFailureHandlingTest.scala | 14 ++++---- .../integration/kafka/api/ProducerSendTest.scala | 8 ++--- .../test/scala/unit/kafka/KafkaConfigTest.scala | 2 +- .../scala/unit/kafka/admin/AddPartitionsTest.scala | 8 +++-- .../test/scala/unit/kafka/admin/AdminTest.scala | 20 +++++------ .../unit/kafka/admin/DeleteConsumerGroupTest.scala | 3 +- .../scala/unit/kafka/admin/DeleteTopicTest.scala | 5 ++- .../scala/unit/kafka/admin/TopicCommandTest.scala | 7 ++-- .../test/scala/unit/kafka/api/ApiUtilsTest.scala | 2 +- .../api/RequestResponseSerializationTest.scala | 2 +- .../unit/kafka/cluster/BrokerEndPointTest.scala | 3 +- .../test/scala/unit/kafka/common/ConfigTest.scala | 2 +- .../test/scala/unit/kafka/common/TopicTest.scala | 2 +- .../unit/kafka/consumer/ConsumerIteratorTest.scala | 9 +++-- .../kafka/consumer/PartitionAssignorTest.scala | 5 ++- .../unit/kafka/consumer/TopicFilterTest.scala | 2 +- .../consumer/ZookeeperConsumerConnectorTest.scala | 8 +++-- .../ConsumerCoordinatorResponseTest.scala | 6 ++-- .../coordinator/ConsumerGroupMetadataTest.scala | 2 +- .../coordinator/CoordinatorMetadataTest.scala | 2 +- .../kafka/coordinator/PartitionAssignorTest.scala | 2 +- .../kafka/integration/AutoOffsetResetTest.scala | 9 ++--- .../scala/unit/kafka/integration/FetcherTest.scala | 11 +++--- .../kafka/integration/KafkaServerTestHarness.scala | 12 ++++--- .../unit/kafka/integration/MinIsrConfigTest.scala | 3 +- .../unit/kafka/integration/PrimitiveApiTest.scala | 5 ++- .../integration/ProducerConsumerTestHarness.scala | 40 ++++++++++++---------- .../unit/kafka/integration/RollingBounceTest.scala | 8 +++-- .../unit/kafka/integration/TopicMetadataTest.scala | 10 +++--- .../integration/UncleanLeaderElectionTest.scala | 12 +++---- .../consumer/ZookeeperConsumerConnectorTest.scala | 6 ++-- .../javaapi/message/BaseMessageSetTestCases.scala | 2 +- .../javaapi/message/ByteBufferMessageSetTest.scala | 2 +- .../test/scala/unit/kafka/log/CleanerTest.scala | 17 ++++----- .../scala/unit/kafka/log/FileMessageSetTest.scala | 2 +- .../unit/kafka/log/LogCleanerIntegrationTest.scala | 4 +-- .../test/scala/unit/kafka/log/LogConfigTest.scala | 4 +-- .../test/scala/unit/kafka/log/LogManagerTest.scala | 18 +++++----- .../test/scala/unit/kafka/log/LogSegmentTest.scala | 8 ++--- core/src/test/scala/unit/kafka/log/LogTest.scala | 2 +- .../scala/unit/kafka/log/OffsetIndexTest.scala | 2 +- .../test/scala/unit/kafka/log/OffsetMapTest.scala | 2 +- .../kafka/message/BaseMessageSetTestCases.scala | 2 +- .../kafka/message/ByteBufferMessageSetTest.scala | 2 +- .../kafka/message/MessageCompressionTest.scala | 2 +- .../scala/unit/kafka/message/MessageTest.scala | 2 +- .../unit/kafka/message/MessageWriterTest.scala | 2 +- .../scala/unit/kafka/metrics/KafkaTimerTest.scala | 5 ++- .../scala/unit/kafka/metrics/MetricsTest.scala | 6 ++-- .../unit/kafka/network/SocketServerTest.scala | 27 +++++++-------- .../unit/kafka/producer/AsyncProducerTest.scala | 15 ++------ .../scala/unit/kafka/producer/ProducerTest.scala | 28 +++++++-------- 
.../unit/kafka/producer/SyncProducerTest.scala | 5 ++- .../unit/kafka/server/AdvertiseBrokerTest.scala | 8 +++-- .../unit/kafka/server/DelayedOperationTest.scala | 15 ++++---- .../kafka/server/DynamicConfigChangeTest.scala | 13 ++++--- .../server/HighwatermarkPersistenceTest.scala | 3 +- .../unit/kafka/server/ISRExpirationTest.scala | 12 +++---- .../kafka/server/KafkaConfigConfigDefTest.scala | 4 +-- .../scala/unit/kafka/server/KafkaConfigTest.scala | 6 ++-- .../unit/kafka/server/LeaderElectionTest.scala | 8 +++-- .../scala/unit/kafka/server/LogOffsetTest.scala | 5 ++- .../scala/unit/kafka/server/LogRecoveryTest.scala | 6 ++-- .../scala/unit/kafka/server/OffsetCommitTest.scala | 5 ++- .../scala/unit/kafka/server/ReplicaFetchTest.scala | 6 ++-- .../unit/kafka/server/ReplicaManagerTest.scala | 5 ++- .../kafka/server/ServerGenerateBrokerIdTest.scala | 8 ++--- .../unit/kafka/server/ServerShutdownTest.scala | 10 +++--- .../unit/kafka/server/ServerStartupTest.scala | 5 ++- .../scala/unit/kafka/server/SimpleFetchTest.scala | 16 ++++----- .../kafka/utils/ByteBoundedBlockingQueueTest.scala | 2 +- .../unit/kafka/utils/CommandLineUtilsTest.scala | 2 +- .../unit/kafka/utils/IteratorTemplateTest.scala | 2 +- .../src/test/scala/unit/kafka/utils/JsonTest.scala | 2 +- .../unit/kafka/utils/ReplicationUtilsTest.scala | 7 ++-- .../scala/unit/kafka/utils/SchedulerTest.scala | 2 +- .../test/scala/unit/kafka/utils/TestUtils.scala | 5 ++- .../unit/kafka/utils/timer/TimerTaskListTest.scala | 2 +- .../scala/unit/kafka/utils/timer/TimerTest.scala | 2 +- .../test/scala/unit/kafka/zk/ZKEphemeralTest.scala | 4 +-- core/src/test/scala/unit/kafka/zk/ZKPathTest.scala | 13 ++++--- .../scala/unit/kafka/zk/ZooKeeperTestHarness.scala | 13 +++---- 87 files changed, 291 insertions(+), 306 deletions(-) diff --git a/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala b/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala index b0750fa..fb7a2ac 100644 --- a/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala +++ b/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala @@ -19,6 +19,7 @@ import org.apache.kafka.clients.consumer._ import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord} import org.apache.kafka.common.TopicPartition import org.junit.Assert._ +import org.junit.Before import scala.collection.JavaConversions._ @@ -51,6 +52,7 @@ class ConsumerBounceTest extends IntegrationTestHarness with Logging { .map(KafkaConfig.fromProps(_, serverConfig)) } + @Before override def setUp() { super.setUp() diff --git a/core/src/test/scala/integration/kafka/api/ConsumerTest.scala b/core/src/test/scala/integration/kafka/api/ConsumerTest.scala index 3eb5f95..6eaf14c 100644 --- a/core/src/test/scala/integration/kafka/api/ConsumerTest.scala +++ b/core/src/test/scala/integration/kafka/api/ConsumerTest.scala @@ -25,11 +25,11 @@ import kafka.server.KafkaConfig import java.util.ArrayList import org.junit.Assert._ +import org.junit.Before import scala.collection.JavaConversions._ import kafka.coordinator.ConsumerCoordinator - /** * Integration tests for the new consumer that cover basic usage as well as server failures */ @@ -56,6 +56,7 @@ class ConsumerTest extends IntegrationTestHarness with Logging { this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") this.consumerConfig.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") + @Before override def setUp() { super.setUp() diff --git 
a/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala b/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala index afcc349..8080b08 100644 --- a/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala +++ b/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala @@ -25,6 +25,7 @@ import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.clients.producer.KafkaProducer import kafka.server.{OffsetManager, KafkaConfig} import kafka.integration.KafkaServerTestHarness +import org.junit.{After, Before} import scala.collection.mutable.Buffer import kafka.coordinator.ConsumerCoordinator @@ -49,6 +50,7 @@ trait IntegrationTestHarness extends KafkaServerTestHarness { cfgs.map(KafkaConfig.fromProps) } + @Before override def setUp() { super.setUp() producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapUrl) @@ -70,7 +72,8 @@ trait IntegrationTestHarness extends KafkaServerTestHarness { servers, servers(0).consumerCoordinator.offsetsTopicConfigs) } - + + @After override def tearDown() { producers.foreach(_.close()) consumers.foreach(_.close()) diff --git a/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala b/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala index ce70a0a..2dbb9dc 100644 --- a/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala @@ -22,7 +22,7 @@ import kafka.utils.{ShutdownableThread, TestUtils} import org.apache.kafka.clients.producer._ import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback import org.junit.Assert._ -import org.junit.Test +import org.junit.{After, Before, Test} class ProducerBounceTest extends KafkaServerTestHarness { private val producerBufferSize = 30000 @@ -62,6 +62,7 @@ class ProducerBounceTest extends KafkaServerTestHarness { private val topic1 = "topic-1" private val topic2 = "topic-2" + @Before override def setUp() { super.setUp() @@ -70,6 +71,7 @@ class ProducerBounceTest extends KafkaServerTestHarness { producer3 = TestUtils.createNewProducer(brokerList, acks = -1, blockOnBufferFull = false, bufferSize = producerBufferSize) } + @After override def tearDown() { if (producer1 != null) producer1.close if (producer2 != null) producer2.close diff --git a/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala b/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala index 83de81c..87db255 100755 --- a/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala @@ -19,7 +19,6 @@ package kafka.api.test import java.util.{Properties, Collection, ArrayList} -import org.scalatest.junit.JUnit3Suite import org.junit.runners.Parameterized import org.junit.runner.RunWith import org.junit.runners.Parameterized.Parameters @@ -36,7 +35,7 @@ import kafka.utils.{CoreUtils, TestUtils} @RunWith(value = classOf[Parameterized]) -class ProducerCompressionTest(compression: String) extends JUnit3Suite with ZooKeeperTestHarness { +class ProducerCompressionTest(compression: String) extends ZooKeeperTestHarness { private val brokerId = 0 private var server: KafkaServer = null diff --git a/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala b/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala index ee94011..1198df0 100644 --- 
a/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala @@ -17,22 +17,20 @@ package kafka.api -import org.junit.Test -import org.junit.Assert._ - +import java.util.concurrent.{ExecutionException, TimeUnit, TimeoutException} import java.util.{Properties, Random} -import java.util.concurrent.{TimeoutException, TimeUnit, ExecutionException} import kafka.common.Topic import kafka.consumer.SimpleConsumer import kafka.integration.KafkaServerTestHarness import kafka.server.KafkaConfig import kafka.utils.{ShutdownableThread, TestUtils} - -import org.apache.kafka.common.KafkaException -import org.apache.kafka.common.errors.{InvalidTopicException, NotEnoughReplicasException, NotEnoughReplicasAfterAppendException} import org.apache.kafka.clients.producer._ import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback +import org.apache.kafka.common.KafkaException +import org.apache.kafka.common.errors.{InvalidTopicException, NotEnoughReplicasAfterAppendException, NotEnoughReplicasException} +import org.junit.Assert._ +import org.junit.{After, Before, Test} class ProducerFailureHandlingTest extends KafkaServerTestHarness { private val producerBufferSize = 30000 @@ -61,6 +59,7 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { private val topic1 = "topic-1" private val topic2 = "topic-2" + @Before override def setUp() { super.setUp() @@ -69,6 +68,7 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { producer3 = TestUtils.createNewProducer(brokerList, acks = -1, blockOnBufferFull = false, bufferSize = producerBufferSize) } + @After override def tearDown() { if (producer1 != null) producer1.close if (producer2 != null) producer2.close diff --git a/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala b/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala index 9ce4bd5..91029bf 100644 --- a/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala @@ -30,11 +30,9 @@ import org.apache.kafka.common.config.ConfigException import org.apache.kafka.common.errors.SerializationException import org.apache.kafka.common.serialization.ByteArraySerializer import org.junit.Assert._ -import org.junit.Test -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before, Test} - -class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { +class ProducerSendTest extends KafkaServerTestHarness { val numServers = 2 val overridingProps = new Properties() @@ -49,6 +47,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { private val topic = "topic" private val numRecords = 100 + @Before override def setUp() { super.setUp() @@ -57,6 +56,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { consumer2 = new SimpleConsumer("localhost", servers(1).boundPort(), 100, 1024*1024, "") } + @After override def tearDown() { consumer1.close() consumer2.close() diff --git a/core/src/test/scala/unit/kafka/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/KafkaConfigTest.scala index 4764c89..1233104 100644 --- a/core/src/test/scala/unit/kafka/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/KafkaConfigTest.scala @@ -21,7 +21,7 @@ import java.security.Permission import kafka.server.KafkaConfig import org.junit.{After, Before, Test} -import junit.framework.Assert._ +import org.junit.Assert._ class KafkaTest { diff 
--git a/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala b/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala index df5c6ba..42d655f 100755 --- a/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala +++ b/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala @@ -17,17 +17,17 @@ package kafka.admin +import org.junit.Assert._ import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ -import junit.framework.Assert._ import kafka.utils.{ZkUtils, CoreUtils, TestUtils} import kafka.cluster.Broker import kafka.client.ClientUtils import kafka.server.{KafkaConfig, KafkaServer} +import org.junit.{After, Before} -class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { +class AddPartitionsTest extends ZooKeeperTestHarness { var configs: Seq[KafkaConfig] = null var servers: Seq[KafkaServer] = Seq.empty[KafkaServer] var brokers: Seq[Broker] = Seq.empty[Broker] @@ -39,6 +39,7 @@ class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { val topic3 = "new-topic3" val topic4 = "new-topic4" + @Before override def setUp() { super.setUp() @@ -54,6 +55,7 @@ class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { createTopic(zkClient, topic4, partitionReplicaAssignment = Map(0->Seq(0,3)), servers = servers) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/admin/AdminTest.scala b/core/src/test/scala/unit/kafka/admin/AdminTest.scala index 252ac81..b9b3753 100755 --- a/core/src/test/scala/unit/kafka/admin/AdminTest.scala +++ b/core/src/test/scala/unit/kafka/admin/AdminTest.scala @@ -16,21 +16,19 @@ */ package kafka.admin -import junit.framework.Assert._ -import org.junit.Test -import org.scalatest.junit.JUnit3Suite +import java.io.File import java.util.Properties -import kafka.utils._ + +import kafka.common.{TopicAndPartition, TopicExistsException} import kafka.log._ +import kafka.server.{KafkaConfig, KafkaServer} +import kafka.utils.TestUtils._ +import kafka.utils.{Logging, TestUtils, ZkUtils, _} import kafka.zk.ZooKeeperTestHarness -import kafka.utils.{Logging, ZkUtils, TestUtils} -import kafka.common.{TopicExistsException, TopicAndPartition} -import kafka.server.{KafkaServer, KafkaConfig} -import java.io.File -import TestUtils._ - +import org.junit.Assert._ +import org.junit.Test -class AdminTest extends JUnit3Suite with ZooKeeperTestHarness with Logging { +class AdminTest extends ZooKeeperTestHarness with Logging { @Test def testReplicaAssignment() { diff --git a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala index 1913ad6..d3abf08 100644 --- a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala +++ b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala @@ -16,7 +16,6 @@ */ package kafka.admin -import org.scalatest.junit.JUnit3Suite import kafka.utils._ import kafka.server.KafkaConfig import org.junit.Test @@ -25,7 +24,7 @@ import org.apache.kafka.clients.producer.{ProducerRecord, KafkaProducer} import kafka.integration.KafkaServerTestHarness -class DeleteConsumerGroupTest extends JUnit3Suite with KafkaServerTestHarness { +class DeleteConsumerGroupTest extends KafkaServerTestHarness { def generateConfigs() = TestUtils.createBrokerConfigs(3, zkConnect, false, 
true).map(KafkaConfig.fromProps) @Test diff --git a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala index fa8ce25..6279180 100644 --- a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala +++ b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala @@ -17,16 +17,15 @@ package kafka.admin import kafka.log.Log -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness -import junit.framework.Assert._ import kafka.utils.{ZkUtils, TestUtils} import kafka.server.{KafkaServer, KafkaConfig} import org.junit.Test +import org.junit.Assert._ import java.util.Properties import kafka.common.TopicAndPartition -class DeleteTopicTest extends JUnit3Suite with ZooKeeperTestHarness { +class DeleteTopicTest extends ZooKeeperTestHarness { @Test def testDeleteTopicWithAllAliveReplicas() { diff --git a/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala b/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala index dcd6988..6717912 100644 --- a/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala +++ b/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala @@ -16,17 +16,16 @@ */ package kafka.admin -import junit.framework.Assert._ import org.junit.Test -import org.scalatest.junit.JUnit3Suite import kafka.utils.Logging import kafka.utils.TestUtils import kafka.zk.ZooKeeperTestHarness +import kafka.coordinator.ConsumerCoordinator import kafka.admin.TopicCommand.TopicCommandOptions import kafka.utils.ZkUtils -import kafka.coordinator.ConsumerCoordinator +import org.junit.Assert._ -class TopicCommandTest extends JUnit3Suite with ZooKeeperTestHarness with Logging { +class TopicCommandTest extends ZooKeeperTestHarness with Logging { @Test def testConfigPreservationAcrossPartitionAlteration() { diff --git a/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala b/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala index 2554425..fff3e7b 100644 --- a/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala @@ -19,7 +19,7 @@ package kafka.api import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ import scala.util.Random import java.nio.ByteBuffer import kafka.common.KafkaException diff --git a/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala b/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala index 5717165..b4c2a22 100644 --- a/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala +++ b/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala @@ -32,7 +32,7 @@ import java.nio.ByteBuffer import org.apache.kafka.common.protocol.SecurityProtocol import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ object SerializationTestUtils { diff --git a/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala b/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala index abe511f..2d3a9c3 100644 --- a/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala +++ b/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala @@ -22,11 +22,10 @@ import java.nio.ByteBuffer import kafka.utils.Logging import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite import scala.collection.mutable -class BrokerEndPointTest extends JUnit3Suite with Logging { +class 
BrokerEndPointTest extends Logging { @Test def testSerDe() = { diff --git a/core/src/test/scala/unit/kafka/common/ConfigTest.scala b/core/src/test/scala/unit/kafka/common/ConfigTest.scala index 0aca938..a42836c 100644 --- a/core/src/test/scala/unit/kafka/common/ConfigTest.scala +++ b/core/src/test/scala/unit/kafka/common/ConfigTest.scala @@ -17,7 +17,7 @@ package kafka.common -import junit.framework.Assert._ +import org.junit.Assert._ import collection.mutable.ArrayBuffer import org.junit.Test import kafka.producer.ProducerConfig diff --git a/core/src/test/scala/unit/kafka/common/TopicTest.scala b/core/src/test/scala/unit/kafka/common/TopicTest.scala index 79532c8..137dcc4 100644 --- a/core/src/test/scala/unit/kafka/common/TopicTest.scala +++ b/core/src/test/scala/unit/kafka/common/TopicTest.scala @@ -17,7 +17,7 @@ package kafka.common -import junit.framework.Assert._ +import org.junit.Assert._ import collection.mutable.ArrayBuffer import org.junit.Test diff --git a/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala b/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala index db5302f..ca63c80 100755 --- a/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala @@ -18,22 +18,20 @@ package kafka.consumer -import java.util.Properties import java.util.concurrent._ import java.util.concurrent.atomic._ import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.message._ import kafka.server._ import kafka.utils.TestUtils._ import kafka.utils._ -import org.junit.Test +import org.junit.{Before, Test} import kafka.serializer._ -import org.scalatest.junit.JUnit3Suite import kafka.integration.KafkaServerTestHarness -class ConsumerIteratorTest extends JUnit3Suite with KafkaServerTestHarness { +class ConsumerIteratorTest extends KafkaServerTestHarness { val numNodes = 1 @@ -49,6 +47,7 @@ class ConsumerIteratorTest extends JUnit3Suite with KafkaServerTestHarness { def consumerConfig = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer0)) + @Before override def setUp() { super.setUp() topicInfos = configs.map(c => new PartitionTopicInfo(topic, diff --git a/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala b/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala index adf0801..c1071b8 100644 --- a/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala @@ -17,18 +17,17 @@ package kafka.consumer -import org.scalatest.junit.JUnit3Suite import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient import org.apache.zookeeper.data.Stat import kafka.utils.{TestUtils, Logging, ZkUtils, Json} -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.common.TopicAndPartition import kafka.consumer.PartitionAssignorTest.StaticSubscriptionInfo import kafka.consumer.PartitionAssignorTest.Scenario import kafka.consumer.PartitionAssignorTest.WildcardSubscriptionInfo -class PartitionAssignorTest extends JUnit3Suite with Logging { +class PartitionAssignorTest extends Logging { def testRoundRobinPartitionAssignor() { val assignor = new RoundRobinAssignor diff --git a/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala b/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala index 4b326d0..2e18e92 100644 --- a/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala +++ 
b/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala @@ -18,7 +18,7 @@ package kafka.consumer -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.Test import kafka.server.OffsetManager diff --git a/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala b/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala index 359b0f5..c851e27 100644 --- a/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala @@ -19,7 +19,7 @@ package kafka.consumer import java.util.{Collections, Properties} -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.common.MessageStreamsExistException import kafka.integration.KafkaServerTestHarness import kafka.javaapi.consumer.ConsumerRebalanceListener @@ -30,11 +30,11 @@ import kafka.utils.TestUtils._ import kafka.utils._ import org.I0Itec.zkclient.ZkClient import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import scala.collection._ -class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class ZookeeperConsumerConnectorTest extends KafkaServerTestHarness with Logging { val RebalanceBackoffMs = 5000 var dirs : ZKGroupTopicDirs = null @@ -54,11 +54,13 @@ class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHar val consumer3 = "consumer3" val nMessages = 2 + @Before override def setUp() { super.setUp() dirs = new ZKGroupTopicDirs(group, topic) } + @After override def tearDown() { super.tearDown() } diff --git a/core/src/test/scala/unit/kafka/coordinator/ConsumerCoordinatorResponseTest.scala b/core/src/test/scala/unit/kafka/coordinator/ConsumerCoordinatorResponseTest.scala index 87a5330..d419799 100644 --- a/core/src/test/scala/unit/kafka/coordinator/ConsumerCoordinatorResponseTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/ConsumerCoordinatorResponseTest.scala @@ -20,10 +20,10 @@ package kafka.coordinator import java.util.concurrent.TimeUnit -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.common.TopicAndPartition -import kafka.server.{OffsetManager, ReplicaManager, KafkaConfig} -import kafka.utils.{KafkaScheduler, TestUtils} +import kafka.server.{OffsetManager, KafkaConfig} +import kafka.utils.TestUtils import org.apache.kafka.common.protocol.Errors import org.apache.kafka.common.requests.JoinGroupRequest import org.easymock.EasyMock diff --git a/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala b/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala index b69c993..5d812c2 100644 --- a/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala @@ -17,7 +17,7 @@ package kafka.coordinator -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.{Before, Test} import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala b/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala index 2cbf6e2..d8a7948 100644 --- a/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala @@ -20,7 +20,7 @@ package kafka.coordinator import 
kafka.server.KafkaConfig import kafka.utils.{ZkUtils, TestUtils} -import junit.framework.Assert._ +import org.junit.Assert._ import org.I0Itec.zkclient.{IZkDataListener, ZkClient} import org.apache.zookeeper.data.Stat import org.easymock.EasyMock diff --git a/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala b/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala index 887cee5..79c691f 100644 --- a/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala @@ -19,7 +19,7 @@ package kafka.coordinator import kafka.common.TopicAndPartition -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala b/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala index 139dc9a..818673f 100644 --- a/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala +++ b/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala @@ -24,12 +24,11 @@ import kafka.utils.TestUtils import kafka.serializer._ import kafka.producer.{Producer, KeyedMessage} -import org.junit.Test +import org.junit.{After, Before, Test} import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.Assert._ -class AutoOffsetResetTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class AutoOffsetResetTest extends KafkaServerTestHarness with Logging { def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) @@ -42,12 +41,14 @@ class AutoOffsetResetTest extends JUnit3Suite with KafkaServerTestHarness with L val requestHandlerLogger = Logger.getLogger(classOf[kafka.server.KafkaRequestHandler]) + @Before override def setUp() { super.setUp() // temporarily set request handler logger to a higher level requestHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { // restore set request handler logger to a higher level requestHandlerLogger.setLevel(Level.ERROR) diff --git a/core/src/test/scala/unit/kafka/integration/FetcherTest.scala b/core/src/test/scala/unit/kafka/integration/FetcherTest.scala index facebd8..92af0a1 100644 --- a/core/src/test/scala/unit/kafka/integration/FetcherTest.scala +++ b/core/src/test/scala/unit/kafka/integration/FetcherTest.scala @@ -19,18 +19,17 @@ package kafka.integration import java.util.concurrent._ import java.util.concurrent.atomic._ +import org.junit.{After, Before} + import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.cluster._ import kafka.server._ -import org.scalatest.junit.JUnit3Suite import kafka.consumer._ -import kafka.serializer._ -import kafka.producer.{KeyedMessage, Producer} import kafka.utils.TestUtils -class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { +class FetcherTest extends KafkaServerTestHarness { val numNodes = 1 def generateConfigs() = TestUtils.createBrokerConfigs(numNodes, zkConnect).map(KafkaConfig.fromProps) @@ -40,6 +39,7 @@ class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { var fetcher: ConsumerFetcherManager = null + @Before override def setUp() { super.setUp TestUtils.createTopic(zkClient, topic, partitionReplicaAssignment = Map(0 -> Seq(configs.head.brokerId)), servers = servers) @@ -59,6 +59,7 @@ class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { 
fetcher.startConnections(topicInfos, cluster) } + @After override def tearDown() { fetcher.stopConnections() super.tearDown diff --git a/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala b/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala index 87c6315..bca0dcc 100755 --- a/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala +++ b/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala @@ -19,17 +19,18 @@ package kafka.integration import java.util.Arrays -import scala.collection.mutable.Buffer +import kafka.common.KafkaException import kafka.server._ import kafka.utils.{CoreUtils, TestUtils} -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness -import kafka.common.KafkaException +import org.junit.{After, Before} + +import scala.collection.mutable.Buffer /** * A test harness that brings up some number of broker nodes */ -trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { +trait KafkaServerTestHarness extends ZooKeeperTestHarness { var instanceConfigs: Seq[KafkaConfig] = null var servers: Buffer[KafkaServer] = null var brokerList: String = null @@ -51,7 +52,7 @@ trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { def bootstrapUrl = servers.map(s => s.config.hostName + ":" + s.boundPort()).mkString(",") - + @Before override def setUp() { super.setUp if(configs.size <= 0) @@ -62,6 +63,7 @@ trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { Arrays.fill(alive, true) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(_.config.logDirs.foreach(CoreUtils.rm(_))) diff --git a/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala b/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala index a2c9713..3c1cade 100644 --- a/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala +++ b/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala @@ -21,9 +21,8 @@ import java.util.Properties import kafka.server.KafkaConfig import kafka.utils.TestUtils -import org.scalatest.junit.JUnit3Suite -class MinIsrConfigTest extends JUnit3Suite with KafkaServerTestHarness { +class MinIsrConfigTest extends KafkaServerTestHarness { val overridingProps = new Properties() overridingProps.put(KafkaConfig.MinInSyncReplicasProp, "5") diff --git a/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala b/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala index 6a758a7..e05d16b 100755 --- a/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala +++ b/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala @@ -18,13 +18,12 @@ package kafka.integration import java.nio.ByteBuffer -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.api.{PartitionFetchInfo, FetchRequest, FetchRequestBuilder} import kafka.server.{KafkaRequestHandler, KafkaConfig} import kafka.producer.{KeyedMessage, Producer} import org.apache.log4j.{Level, Logger} import kafka.zk.ZooKeeperTestHarness -import org.scalatest.junit.JUnit3Suite import scala.collection._ import kafka.common.{TopicAndPartition, ErrorMapping, UnknownTopicOrPartitionException, OffsetOutOfRangeException} import kafka.utils.{StaticPartitioner, TestUtils, CoreUtils} @@ -34,7 +33,7 @@ import java.util.Properties /** * End to end tests of the primitive apis against a local server */ -class PrimitiveApiTest extends JUnit3Suite with ProducerConsumerTestHarness with 
ZooKeeperTestHarness { +class PrimitiveApiTest extends ProducerConsumerTestHarness with ZooKeeperTestHarness { val requestHandlerLogger = Logger.getLogger(classOf[KafkaRequestHandler]) def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) diff --git a/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala b/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala index 4614a92..cc5954d 100644 --- a/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala +++ b/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala @@ -5,8 +5,8 @@ * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,28 +18,30 @@ package kafka.integration import kafka.consumer.SimpleConsumer -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.producer.Producer import kafka.utils.{StaticPartitioner, TestUtils} import kafka.serializer.StringEncoder -trait ProducerConsumerTestHarness extends JUnit3Suite with KafkaServerTestHarness { - val host = "localhost" - var producer: Producer[String, String] = null - var consumer: SimpleConsumer = null +trait ProducerConsumerTestHarness extends KafkaServerTestHarness { + val host = "localhost" + var producer: Producer[String, String] = null + var consumer: SimpleConsumer = null + @Before override def setUp() { - super.setUp - producer = TestUtils.createProducer[String, String](TestUtils.getBrokerListStrFromServers(servers), - encoder = classOf[StringEncoder].getName, - keyEncoder = classOf[StringEncoder].getName, - partitioner = classOf[StaticPartitioner].getName) - consumer = new SimpleConsumer(host, servers(0).boundPort(), 1000000, 64*1024, "") - } + super.setUp + producer = TestUtils.createProducer[String, String](TestUtils.getBrokerListStrFromServers(servers), + encoder = classOf[StringEncoder].getName, + keyEncoder = classOf[StringEncoder].getName, + partitioner = classOf[StaticPartitioner].getName) + consumer = new SimpleConsumer(host, servers(0).boundPort(), 1000000, 64 * 1024, "") + } - override def tearDown() { - producer.close() - consumer.close() - super.tearDown - } + @After + override def tearDown() { + producer.close() + consumer.close() + super.tearDown + } } diff --git a/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala b/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala index 12d0733..2fd10d8 100755 --- a/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala +++ b/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala @@ -17,18 +17,19 @@ package kafka.integration -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.{CoreUtils, TestUtils} import kafka.server.{KafkaConfig, KafkaServer} -class RollingBounceTest extends JUnit3Suite with ZooKeeperTestHarness { +class RollingBounceTest extends ZooKeeperTestHarness { val partitionId = 0 var servers: Seq[KafkaServer] = null + @Before override def setUp() { 
super.setUp() // controlled.shutdown.enable is true by default @@ -39,6 +40,7 @@ class RollingBounceTest extends JUnit3Suite with ZooKeeperTestHarness { servers = configs.map(c => TestUtils.createServer(KafkaConfig.fromProps(c))) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala b/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala index 5b6c9d6..2e0db2f 100644 --- a/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala +++ b/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala @@ -19,9 +19,8 @@ package kafka.integration import java.nio.ByteBuffer -import junit.framework.Assert._ import kafka.admin.AdminUtils -import kafka.api.{TopicMetadataResponse, TopicMetadataRequest} +import kafka.api.{TopicMetadataRequest, TopicMetadataResponse} import kafka.client.ClientUtils import kafka.cluster.{Broker, BrokerEndPoint} import kafka.common.ErrorMapping @@ -30,14 +29,16 @@ import kafka.utils.TestUtils import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.Assert._ +import org.junit.{After, Before} -class TopicMetadataTest extends JUnit3Suite with ZooKeeperTestHarness { +class TopicMetadataTest extends ZooKeeperTestHarness { private var server1: KafkaServer = null var brokerEndPoints: Seq[BrokerEndPoint] = null var adHocConfigs: Seq[KafkaConfig] = null val numConfigs: Int = 4 + @Before override def setUp() { super.setUp() val props = createBrokerConfigs(numConfigs, zkConnect) @@ -47,6 +48,7 @@ class TopicMetadataTest extends JUnit3Suite with ZooKeeperTestHarness { brokerEndPoints = Seq(new Broker(server1.config.brokerId, server1.config.hostName, server1.boundPort()).getBrokerEndPoint(SecurityProtocol.PLAINTEXT)) } + @After override def tearDown() { server1.shutdown() super.tearDown() diff --git a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala index e4bf2df..28f6cc3 100755 --- a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala +++ b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala @@ -18,24 +18,22 @@ package kafka.integration import org.apache.kafka.common.config.ConfigException +import org.junit.{After, Before} -import scala.collection.mutable.MutableList import scala.util.Random import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite import java.util.Properties -import junit.framework.Assert._ import kafka.admin.AdminUtils import kafka.common.FailedToSendMessageException -import kafka.consumer.{Consumer, ConsumerConfig, ConsumerTimeoutException} -import kafka.producer.{KeyedMessage, Producer} +import kafka.consumer.{Consumer, ConsumerConfig} import kafka.serializer.StringDecoder import kafka.server.{KafkaConfig, KafkaServer} import kafka.utils.CoreUtils import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness +import org.junit.Assert._ -class UncleanLeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { +class UncleanLeaderElectionTest extends ZooKeeperTestHarness { val brokerId1 = 0 val brokerId2 = 1 @@ -58,6 +56,7 @@ class UncleanLeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { val syncProducerLogger = Logger.getLogger(classOf[kafka.producer.SyncProducer]) 
val eventHandlerLogger = Logger.getLogger(classOf[kafka.producer.async.DefaultEventHandler[Object, Object]]) + @Before override def setUp() { super.setUp() @@ -77,6 +76,7 @@ class UncleanLeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { eventHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { servers.foreach(server => shutdownServer(server)) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala b/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala index 74c761d..cf6b9a9 100644 --- a/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala +++ b/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala @@ -20,7 +20,6 @@ package kafka.javaapi.consumer import java.util.Properties import kafka.server._ -import kafka.message._ import kafka.serializer._ import kafka.integration.KafkaServerTestHarness import kafka.producer.KeyedMessage @@ -33,12 +32,11 @@ import kafka.common.MessageStreamsExistException import scala.collection.JavaConversions -import org.scalatest.junit.JUnit3Suite import org.apache.log4j.{Level, Logger} -import junit.framework.Assert._ +import org.junit.Assert._ -class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHarness with ZooKeeperTestHarness with Logging { +class ZookeeperConsumerConnectorTest extends KafkaServerTestHarness with ZooKeeperTestHarness with Logging { val numNodes = 2 val numParts = 2 val topic = "topic1" diff --git a/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala b/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala index 726399e..80f809e 100644 --- a/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala +++ b/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala @@ -17,7 +17,7 @@ package kafka.javaapi.message -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.Test import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala b/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala index 383fcef..fbdb000 100644 --- a/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala @@ -17,7 +17,7 @@ package kafka.javaapi.message -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import kafka.message.{DefaultCompressionCodec, CompressionCodec, NoCompressionCodec, Message} diff --git a/core/src/test/scala/unit/kafka/log/CleanerTest.scala b/core/src/test/scala/unit/kafka/log/CleanerTest.scala index 0e2a6a1..9c4518c 100755 --- a/core/src/test/scala/unit/kafka/log/CleanerTest.scala +++ b/core/src/test/scala/unit/kafka/log/CleanerTest.scala @@ -17,19 +17,20 @@ package kafka.log +import java.io.File +import java.nio._ import java.util.Properties +import java.util.concurrent.atomic.AtomicLong -import junit.framework.Assert._ -import org.scalatest.junit.JUnitSuite -import org.junit.{After, Test} -import java.nio._ -import java.io.File -import scala.collection._ import kafka.common._ -import kafka.utils._ import kafka.message._ -import java.util.concurrent.atomic.AtomicLong +import kafka.utils._ import org.apache.kafka.common.utils.Utils +import 
org.junit.Assert._ +import org.junit.{After, Test} +import org.scalatest.junit.JUnitSuite + +import scala.collection._ /** * Unit tests for the log cleaning logic diff --git a/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala b/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala index 02cf668..95085f4 100644 --- a/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala @@ -20,7 +20,7 @@ package kafka.log import java.io._ import java.nio._ import java.util.concurrent.atomic._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.TestUtils._ import kafka.message._ import org.junit.Test diff --git a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala index 381e9aa..70beb5f 100755 --- a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala @@ -30,16 +30,14 @@ import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.runners.Parameterized.Parameters -import org.scalatest.junit.JUnit3Suite import scala.collection._ - /** * This is an integration test that tests the fully integrated log cleaner */ @RunWith(value = classOf[Parameterized]) -class LogCleanerIntegrationTest(compressionCodec: String) extends JUnit3Suite { +class LogCleanerIntegrationTest(compressionCodec: String) { val time = new MockTime() val segmentSize = 100 diff --git a/core/src/test/scala/unit/kafka/log/LogConfigTest.scala b/core/src/test/scala/unit/kafka/log/LogConfigTest.scala index 19dcb47..50238aa 100644 --- a/core/src/test/scala/unit/kafka/log/LogConfigTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogConfigTest.scala @@ -21,9 +21,9 @@ import java.util.Properties import org.apache.kafka.common.config.ConfigException import org.junit.{Assert, Test} -import org.scalatest.junit.JUnit3Suite +import org.scalatest.Assertions._ -class LogConfigTest extends JUnit3Suite { +class LogConfigTest { @Test def testFromPropsEmpty() { diff --git a/core/src/test/scala/unit/kafka/log/LogManagerTest.scala b/core/src/test/scala/unit/kafka/log/LogManagerTest.scala index a13f2be..816354f 100755 --- a/core/src/test/scala/unit/kafka/log/LogManagerTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogManagerTest.scala @@ -19,14 +19,14 @@ package kafka.log import java.io._ import java.util.Properties -import junit.framework.Assert._ -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import kafka.server.{BrokerState, OffsetCheckpoint} + import kafka.common._ +import kafka.server.OffsetCheckpoint import kafka.utils._ +import org.junit.Assert._ +import org.junit.{After, Before, Test} -class LogManagerTest extends JUnit3Suite { +class LogManagerTest { val time: MockTime = new MockTime() val maxRollInterval = 100 @@ -41,20 +41,20 @@ class LogManagerTest extends JUnit3Suite { val name = "kafka" val veryLargeLogFlushInterval = 10000000L - override def setUp() { - super.setUp() + @Before + def setUp() { logDir = TestUtils.tempDir() logManager = createLogManager() logManager.startup logDir = logManager.logDirs(0) } - override def tearDown() { + @After + def tearDown() { if(logManager != null) logManager.shutdown() CoreUtils.rm(logDir) logManager.logDirs.foreach(CoreUtils.rm(_)) - super.tearDown() } /** diff --git a/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala 
b/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala index abcd1f0..fa982b1 100644 --- a/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala @@ -16,19 +16,15 @@ */ package kafka.log -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ -import java.io.File -import java.io.RandomAccessFile -import java.util.Random import org.junit.{Test, After} -import org.scalatest.junit.JUnit3Suite import kafka.utils.TestUtils import kafka.message._ import kafka.utils.SystemTime import scala.collection._ -class LogSegmentTest extends JUnit3Suite { +class LogSegmentTest { val segments = mutable.ArrayBuffer[LogSegment]() diff --git a/core/src/test/scala/unit/kafka/log/LogTest.scala b/core/src/test/scala/unit/kafka/log/LogTest.scala index 9e26190..7f0d9d6 100755 --- a/core/src/test/scala/unit/kafka/log/LogTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogTest.scala @@ -20,7 +20,7 @@ package kafka.log import java.io._ import java.util.Properties import java.util.concurrent.atomic._ -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.{After, Before, Test} import kafka.message._ diff --git a/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala b/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala index 9213a5d..dfd7b54 100644 --- a/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala +++ b/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala @@ -18,7 +18,7 @@ package kafka.log import java.io._ -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.{Collections, Arrays} import org.junit._ import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala b/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala index 12ce39e..f50daa4 100644 --- a/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala +++ b/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala @@ -20,7 +20,7 @@ package kafka.log import java.nio._ import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ class OffsetMapTest extends JUnitSuite { diff --git a/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala b/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala index dd8847f..208994b 100644 --- a/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala +++ b/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala @@ -18,7 +18,7 @@ package kafka.message import java.io.RandomAccessFile -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.TestUtils._ import kafka.log.FileMessageSet import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala b/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala index 07bc317..511060e 100644 --- a/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala @@ -19,7 +19,7 @@ package kafka.message import java.nio._ import java.util.concurrent.atomic.AtomicLong -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala b/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala index 76987d4..f45bead 100644 --- 
a/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala @@ -21,7 +21,7 @@ import java.io.ByteArrayOutputStream import scala.collection._ import org.scalatest.junit.JUnitSuite import org.junit._ -import junit.framework.Assert._ +import org.junit.Assert._ class MessageCompressionTest extends JUnitSuite { diff --git a/core/src/test/scala/unit/kafka/message/MessageTest.scala b/core/src/test/scala/unit/kafka/message/MessageTest.scala index 11c0f81..3c12d13 100755 --- a/core/src/test/scala/unit/kafka/message/MessageTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageTest.scala @@ -20,7 +20,7 @@ package kafka.message import java.nio._ import java.util.HashMap import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.{Before, Test} import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala b/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala index b08a343..3993fdb 100644 --- a/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala @@ -20,7 +20,7 @@ package kafka.message import java.io.{InputStream, ByteArrayInputStream, ByteArrayOutputStream} import java.nio.ByteBuffer import java.util.Random -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala b/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala index 7df7405..3b3e4c3 100644 --- a/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala +++ b/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala @@ -18,12 +18,11 @@ package kafka.metrics */ import org.junit.Test -import org.scalatest.junit.JUnit3Suite import java.util.concurrent.TimeUnit -import junit.framework.Assert._ +import org.junit.Assert._ import com.yammer.metrics.core.{MetricsRegistry, Clock} -class KafkaTimerTest extends JUnit3Suite { +class KafkaTimerTest { @Test def testKafkaTimer() { diff --git a/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala b/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala index b42101b..5a268d1 100644 --- a/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala +++ b/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala @@ -22,10 +22,9 @@ import java.util.Properties import com.yammer.metrics.Metrics import com.yammer.metrics.core.MetricPredicate import org.junit.Test -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.integration.KafkaServerTestHarness import kafka.server._ -import kafka.message._ import kafka.serializer._ import kafka.utils._ import kafka.admin.AdminUtils @@ -33,9 +32,8 @@ import kafka.utils.TestUtils._ import scala.collection._ import scala.collection.JavaConversions._ import scala.util.matching.Regex -import org.scalatest.junit.JUnit3Suite -class MetricsTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class MetricsTest extends KafkaServerTestHarness with Logging { val numNodes = 2 val numParts = 2 val topic = "topic1" diff --git a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala index 7dc2fad..d08b8b8 100644 --- a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala +++ 
b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala @@ -17,27 +17,26 @@ package kafka.network; -import java.net._ import java.io._ +import java.net._ +import java.nio.ByteBuffer +import java.util.Random + +import kafka.api.ProducerRequest import kafka.cluster.EndPoint +import kafka.common.TopicAndPartition +import kafka.message.ByteBufferMessageSet +import kafka.producer.SyncProducerConfig import org.apache.kafka.common.metrics.Metrics import org.apache.kafka.common.network.NetworkSend import org.apache.kafka.common.protocol.SecurityProtocol import org.apache.kafka.common.utils.SystemTime +import org.junit.Assert._ import org.junit._ -import org.scalatest.junit.JUnitSuite -import java.util.Random -import junit.framework.Assert._ -import kafka.producer.SyncProducerConfig -import kafka.api.ProducerRequest -import java.nio.ByteBuffer -import kafka.common.TopicAndPartition -import kafka.message.ByteBufferMessageSet -import java.nio.channels.SelectionKey -import kafka.utils.TestUtils + import scala.collection.Map -class SocketServerTest extends JUnitSuite { +class SocketServerTest { val server: SocketServer = new SocketServer(0, Map(SecurityProtocol.PLAINTEXT -> EndPoint(null, 0, SecurityProtocol.PLAINTEXT), @@ -84,11 +83,11 @@ class SocketServerTest extends JUnitSuite { new Socket("localhost", server.boundPort(protocol)) } - @After def cleanup() { server.shutdown() } + @Test def simpleRequest() { val plainSocket = connect(protocol = SecurityProtocol.PLAINTEXT) @@ -175,7 +174,7 @@ class SocketServerTest extends JUnitSuite { } @Test - def testMaxConnectionsPerIPOverrides(): Unit = { + def testMaxConnectionsPerIPOverrides() { val overrideNum = 6 val overrides: Map[String, Int] = Map("localhost" -> overrideNum) val overrideServer: SocketServer = new SocketServer(0, diff --git a/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala b/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala index be4bb87..b54f30e 100755 --- a/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala @@ -19,36 +19,27 @@ package kafka.producer import java.util.Properties import java.util.concurrent.LinkedBlockingQueue -import junit.framework.Assert._ +import org.junit.Assert._ import org.easymock.EasyMock import org.junit.Test import kafka.api._ -import kafka.cluster.{BrokerEndPoint, Broker} +import kafka.cluster.BrokerEndPoint import kafka.common._ import kafka.message._ import kafka.producer.async._ import kafka.serializer._ import kafka.server.KafkaConfig import kafka.utils.TestUtils._ -import org.scalatest.junit.JUnit3Suite import scala.collection.Map import scala.collection.mutable.ArrayBuffer import kafka.utils._ -class AsyncProducerTest extends JUnit3Suite { +class AsyncProducerTest { // One of the few cases we can just set a fixed port because the producer is mocked out here since this uses mocks val props = Seq(createBrokerConfig(1, "127.0.0.1:1", port=65534)) val configs = props.map(KafkaConfig.fromProps) val brokerList = configs.map(c => org.apache.kafka.common.utils.Utils.formatAddress(c.hostName, c.port)).mkString(",") - override def setUp() { - super.setUp() - } - - override def tearDown() { - super.tearDown() - } - @Test def testProducerQueueSize() { // a mock event handler that blocks diff --git a/core/src/test/scala/unit/kafka/producer/ProducerTest.scala b/core/src/test/scala/unit/kafka/producer/ProducerTest.scala index 4d2536b..dd96d29 100755 --- 
a/core/src/test/scala/unit/kafka/producer/ProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/ProducerTest.scala @@ -17,26 +17,24 @@ package kafka.producer -import org.scalatest.TestFailedException -import org.scalatest.junit.JUnit3Suite -import kafka.consumer.SimpleConsumer -import kafka.message.Message -import kafka.server.{KafkaConfig, KafkaRequestHandler, KafkaServer} -import kafka.zk.ZooKeeperTestHarness -import org.apache.log4j.{Level, Logger} -import org.junit.Test -import kafka.utils._ import java.util +import java.util.Properties + import kafka.admin.AdminUtils -import util.Properties import kafka.api.FetchRequestBuilder -import org.junit.Assert.assertTrue -import org.junit.Assert.assertFalse -import org.junit.Assert.assertEquals import kafka.common.{ErrorMapping, FailedToSendMessageException} +import kafka.consumer.SimpleConsumer +import kafka.message.Message import kafka.serializer.StringEncoder +import kafka.server.{KafkaConfig, KafkaRequestHandler, KafkaServer} +import kafka.utils._ +import kafka.zk.ZooKeeperTestHarness +import org.apache.log4j.{Level, Logger} +import org.junit.Assert._ +import org.junit.{After, Before, Test} +import org.scalatest.exceptions.TestFailedException -class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ +class ProducerTest extends ZooKeeperTestHarness with Logging{ private val brokerId1 = 0 private val brokerId2 = 1 private var server1: KafkaServer = null @@ -60,6 +58,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ consumer2 } + @Before override def setUp() { super.setUp() // set up 2 brokers with 4 partitions each @@ -81,6 +80,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ requestHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { // restore set request handler logger to a higher level requestHandlerLogger.setLevel(Level.ERROR) diff --git a/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala b/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala index 8c3fb7a..90689f6 100644 --- a/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala @@ -20,7 +20,7 @@ package kafka.producer import java.net.SocketTimeoutException import java.util.Properties -import junit.framework.Assert +import org.junit.Assert import kafka.admin.AdminUtils import kafka.api.ProducerResponseStatus import kafka.common.{ErrorMapping, TopicAndPartition} @@ -30,9 +30,8 @@ import kafka.server.KafkaConfig import kafka.utils._ import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite -class SyncProducerTest extends JUnit3Suite with KafkaServerTestHarness { +class SyncProducerTest extends KafkaServerTestHarness { private val messageBytes = new Array[Byte](2) // turning off controlled shutdown since testProducerCanTimeout() explicitly shuts down request handler pool. 
def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfigs(1, zkConnect, false).head)) diff --git a/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala b/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala index e899b02..066f506 100755 --- a/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala +++ b/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala @@ -17,18 +17,19 @@ package kafka.server -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.{TestUtils, CoreUtils, ZkUtils} import kafka.zk.ZooKeeperTestHarness import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} -class AdvertiseBrokerTest extends JUnit3Suite with ZooKeeperTestHarness { +class AdvertiseBrokerTest extends ZooKeeperTestHarness { var server : KafkaServer = null val brokerId = 0 val advertisedHostName = "routable-host" val advertisedPort = 1234 + @Before override def setUp() { super.setUp() @@ -39,6 +40,7 @@ class AdvertiseBrokerTest extends JUnit3Suite with ZooKeeperTestHarness { server = TestUtils.createServer(KafkaConfig.fromProps(props)) } + @After override def tearDown() { server.shutdown() CoreUtils.rm(server.config.logDirs) diff --git a/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala b/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala index f3ab3f4..df8d5b1 100644 --- a/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala +++ b/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala @@ -17,22 +17,21 @@ package kafka.server -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.{After, Before, Test} +import org.junit.Assert._ -class DelayedOperationTest extends JUnit3Suite { +class DelayedOperationTest { var purgatory: DelayedOperationPurgatory[MockDelayedOperation] = null - override def setUp() { - super.setUp() + @Before + def setUp() { purgatory = new DelayedOperationPurgatory[MockDelayedOperation](purgatoryName = "mock") } - override def tearDown() { + @After + def tearDown() { purgatory.shutdown() - super.tearDown() } @Test diff --git a/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala b/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala index 8a871cf..9ba2cfb 100644 --- a/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala +++ b/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala @@ -18,16 +18,15 @@ package kafka.server import java.util.Properties -import junit.framework.Assert._ -import org.junit.Test -import kafka.integration.KafkaServerTestHarness -import kafka.utils._ +import kafka.admin.{AdminOperationException, AdminUtils} import kafka.common._ +import kafka.integration.KafkaServerTestHarness import kafka.log.LogConfig -import kafka.admin.{AdminOperationException, AdminUtils} -import org.scalatest.junit.JUnit3Suite +import kafka.utils._ +import org.junit.Assert._ +import org.junit.Test -class DynamicConfigChangeTest extends JUnit3Suite with KafkaServerTestHarness { +class DynamicConfigChangeTest extends KafkaServerTestHarness { def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) @Test diff --git a/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala b/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala index 60cd824..7f55a80 100755 --- 
a/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala +++ b/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala @@ -19,7 +19,6 @@ package kafka.server import kafka.log._ import java.io.File import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite import org.easymock.EasyMock import org.junit._ import org.junit.Assert._ @@ -28,7 +27,7 @@ import kafka.cluster.Replica import kafka.utils.{SystemTime, KafkaScheduler, TestUtils, MockTime, CoreUtils} import java.util.concurrent.atomic.AtomicBoolean -class HighwatermarkPersistenceTest extends JUnit3Suite { +class HighwatermarkPersistenceTest { val configs = TestUtils.createBrokerConfigs(2, TestUtils.MockZkConnect).map(KafkaConfig.fromProps) val topic = "foo" diff --git a/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala b/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala index 90529fa..25f0d41 100644 --- a/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala +++ b/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala @@ -18,7 +18,7 @@ package kafka.server import java.util.Properties -import org.scalatest.junit.JUnit3Suite +import org.junit.{Before, After} import collection.mutable.HashMap import collection.mutable.Map import kafka.cluster.{Partition, Replica} @@ -30,7 +30,7 @@ import java.util.concurrent.atomic.AtomicBoolean import kafka.message.MessageSet -class IsrExpirationTest extends JUnit3Suite { +class IsrExpirationTest { var topicPartitionIsr: Map[(String, Int), Seq[Int]] = new HashMap[(String, Int), Seq[Int]]() val replicaLagTimeMaxMs = 100L @@ -46,14 +46,14 @@ class IsrExpirationTest extends JUnit3Suite { var replicaManager: ReplicaManager = null - override def setUp() { - super.setUp() + @Before + def setUp() { replicaManager = new ReplicaManager(configs.head, time, null, null, null, new AtomicBoolean(false)) } - override def tearDown() { + @After + def tearDown() { replicaManager.shutdown(false) - super.tearDown() } /* diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala index 98a5b04..79ac5af 100644 --- a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala @@ -22,12 +22,12 @@ import kafka.api.ApiVersion import kafka.message._ import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.{Assert, Test} -import org.scalatest.junit.JUnit3Suite +import org.scalatest.Assertions._ import scala.collection.Map import scala.util.Random._ -class KafkaConfigConfigDefTest extends JUnit3Suite { +class KafkaConfigConfigDefTest { @Test def testFromPropsEmpty() { diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index d354452..4ca2e47 100755 --- a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -19,15 +19,15 @@ package kafka.server import java.util.Properties -import junit.framework.Assert._ import kafka.api.{ApiVersion, KAFKA_082} import kafka.utils.{TestUtils, CoreUtils} import org.apache.kafka.common.config.ConfigException import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite +import org.junit.Assert._ +import org.scalatest.Assertions._ -class KafkaConfigTest extends JUnit3Suite { +class KafkaConfigTest { @Test def 
testLogRetentionTimeHoursProvided() { diff --git a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala index f1977d8..f77f186 100755 --- a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala +++ b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala @@ -17,7 +17,7 @@ package kafka.server -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.api._ import kafka.utils.{TestUtils, ZkUtils, CoreUtils} import kafka.cluster.Broker @@ -26,9 +26,9 @@ import kafka.controller.{ControllerChannelManager, ControllerContext, LeaderIsrA import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} -class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { +class LeaderElectionTest extends ZooKeeperTestHarness { val brokerId1 = 0 val brokerId2 = 1 @@ -36,6 +36,7 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { var staleControllerEpochDetected = false + @Before override def setUp() { super.setUp() @@ -48,6 +49,7 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { servers ++= List(server1, server2) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala b/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala index e57c1de..344001d 100755 --- a/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala +++ b/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala @@ -19,12 +19,11 @@ package kafka.server import java.io.File import kafka.utils._ -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.{Random, Properties} import kafka.consumer.SimpleConsumer import kafka.message.{NoCompressionCodec, ByteBufferMessageSet, Message} import kafka.zk.ZooKeeperTestHarness -import org.scalatest.junit.JUnit3Suite import kafka.admin.AdminUtils import kafka.api.{PartitionOffsetRequestInfo, FetchRequestBuilder, OffsetRequest} import kafka.utils.TestUtils._ @@ -33,7 +32,7 @@ import org.junit.After import org.junit.Before import org.junit.Test -class LogOffsetTest extends JUnit3Suite with ZooKeeperTestHarness { +class LogOffsetTest extends ZooKeeperTestHarness { val random = new Random() var logDir: File = null var topicLogDir: File = null diff --git a/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala b/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala index 7688f26..77e17e1 100755 --- a/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala +++ b/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala @@ -27,10 +27,10 @@ import kafka.serializer.StringEncoder import java.io.File -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import org.junit.Assert._ -class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { +class LogRecoveryTest extends ZooKeeperTestHarness { val replicaLagTimeMaxMs = 5000L val replicaLagMaxMessages = 10L @@ -69,6 +69,7 @@ class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { keyEncoder = classOf[IntEncoder].getName) } + @Before override def setUp() { super.setUp() @@ -86,6 +87,7 @@ class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { updateProducer() } + @After override def tearDown() { producer.close() for(server <- servers) 
{ diff --git a/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala b/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala index 39a6852..f846698 100755 --- a/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala +++ b/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala @@ -25,7 +25,6 @@ import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness import org.junit.{After, Before, Test} -import org.scalatest.junit.JUnit3Suite import java.util.Properties import java.io.File @@ -33,9 +32,9 @@ import java.io.File import scala.util.Random import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ -class OffsetCommitTest extends JUnit3Suite with ZooKeeperTestHarness { +class OffsetCommitTest extends ZooKeeperTestHarness { val random: Random = new Random() val group = "test-group" val retentionCheckInterval: Long = 100L diff --git a/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala b/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala index a3a03db..dead087 100644 --- a/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala +++ b/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala @@ -17,7 +17,7 @@ package kafka.server -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ import kafka.producer.KeyedMessage @@ -25,11 +25,12 @@ import kafka.serializer.StringEncoder import kafka.utils.{TestUtils} import kafka.common._ -class ReplicaFetchTest extends JUnit3Suite with ZooKeeperTestHarness { +class ReplicaFetchTest extends ZooKeeperTestHarness { var brokers: Seq[KafkaServer] = null val topic1 = "foo" val topic2 = "bar" + @Before override def setUp() { super.setUp() brokers = createBrokerConfigs(2, zkConnect, false) @@ -37,6 +38,7 @@ class ReplicaFetchTest extends JUnit3Suite with ZooKeeperTestHarness { .map(config => TestUtils.createServer(config)) } + @After override def tearDown() { brokers.foreach(_.shutdown()) super.tearDown() diff --git a/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala b/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala index 00d5933..3770cb4 100644 --- a/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala +++ b/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala @@ -27,12 +27,11 @@ import java.io.File import org.apache.kafka.common.protocol.Errors import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite import org.junit.Test import scala.collection.Map -class ReplicaManagerTest extends JUnit3Suite { +class ReplicaManagerTest { val topic = "test-topic" @@ -84,7 +83,7 @@ class ReplicaManagerTest extends JUnit3Suite { rm.appendMessages(timeout = 0, requiredAcks = 3, internalTopicsAllowed = false, messagesPerPartition = produceRequest.data, responseCallback = callback) - rm.shutdown(false); + rm.shutdown(false) TestUtils.verifyNonDaemonThreadsStatus diff --git a/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala b/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala index 12269cd..1185a6f 100755 --- a/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala @@ -20,18 +20,18 @@ import java.util.Properties import kafka.zk.ZooKeeperTestHarness import kafka.utils.{TestUtils, CoreUtils} -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ 
+import org.junit.{Before, Test} +import org.junit.Assert._ import java.io.File -class ServerGenerateBrokerIdTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerGenerateBrokerIdTest extends ZooKeeperTestHarness { var props1: Properties = null var config1: KafkaConfig = null var props2: Properties = null var config2: KafkaConfig = null val brokerMetaPropsFile = "meta.properties" + @Before override def setUp() { super.setUp() props1 = TestUtils.createBrokerConfig(-1, zkConnect) diff --git a/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala b/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala index 95534e3..2a8da0c 100755 --- a/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala @@ -27,18 +27,18 @@ import kafka.serializer.StringEncoder import java.io.File -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.{Before, Test} +import org.junit.Assert._ -class ServerShutdownTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerShutdownTest extends ZooKeeperTestHarness { var config: KafkaConfig = null val host = "localhost" val topic = "test" val sent1 = List("hello", "there") val sent2 = List("more", "messages") - override def setUp(): Unit = { + @Before + override def setUp() { super.setUp() val props = TestUtils.createBrokerConfig(0, zkConnect) config = KafkaConfig.fromProps(props) diff --git a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala index 60e10b3..7d986ad 100755 --- a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala @@ -17,15 +17,14 @@ package kafka.server -import org.scalatest.junit.JUnit3Suite import kafka.utils.ZkUtils import kafka.utils.CoreUtils import kafka.utils.TestUtils import kafka.zk.ZooKeeperTestHarness -import junit.framework.Assert._ +import org.junit.Assert._ -class ServerStartupTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerStartupTest extends ZooKeeperTestHarness { def testBrokerCreatesZKChroot { val brokerId = 0 diff --git a/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala b/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala index 09a0961..d950665 100644 --- a/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala +++ b/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala @@ -22,18 +22,17 @@ import kafka.cluster.Replica import kafka.common.TopicAndPartition import kafka.log.Log import kafka.message.{MessageSet, ByteBufferMessageSet, Message} +import org.junit.{After, Before} -import scala.Some import java.util.{Properties, Collections} import java.util.concurrent.atomic.AtomicBoolean import collection.JavaConversions._ import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.Assert._ -class SimpleFetchTest extends JUnit3Suite { +class SimpleFetchTest { val replicaLagTimeMaxMs = 100L val replicaFetchWaitMaxMs = 100 @@ -63,9 +62,8 @@ class SimpleFetchTest extends JUnit3Suite { var replicaManager: ReplicaManager = null - override def setUp() { - super.setUp() - + @Before + def setUp() { // create nice mock since we don't particularly care about zkclient calls val zkClient = EasyMock.createNiceMock(classOf[ZkClient]) EasyMock.replay(zkClient) @@ -117,9 +115,9 @@ class 
SimpleFetchTest extends JUnit3Suite { partition.inSyncReplicas = allReplicas.toSet } - override def tearDown() { + @After + def tearDown() { replicaManager.shutdown(false) - super.tearDown() } /** diff --git a/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala b/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala index fd8cf7b..4a070bd 100644 --- a/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala +++ b/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala @@ -19,7 +19,7 @@ package kafka.utils import java.util.concurrent.TimeUnit -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test class ByteBoundedBlockingQueueTest { diff --git a/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala b/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala index 6380b6e..068526e 100644 --- a/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala @@ -17,7 +17,7 @@ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test class CommandLineUtilsTest { diff --git a/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala b/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala index fbd245c..56f5905 100644 --- a/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala +++ b/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.Assertions import org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/utils/JsonTest.scala b/core/src/test/scala/unit/kafka/utils/JsonTest.scala index 93550e8..6c8ed97 100644 --- a/core/src/test/scala/unit/kafka/utils/JsonTest.scala +++ b/core/src/test/scala/unit/kafka/utils/JsonTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.{Test, After, Before} class JsonTest { diff --git a/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala b/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala index b9de8d6..b3835f0 100644 --- a/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala @@ -22,13 +22,12 @@ import kafka.server.{ReplicaFetcherManager, KafkaConfig} import kafka.api.LeaderAndIsr import kafka.zk.ZooKeeperTestHarness import kafka.common.TopicAndPartition -import org.scalatest.junit.JUnit3Suite import org.junit.Assert._ -import org.junit.Test +import org.junit.{Before, Test} import org.easymock.EasyMock -class ReplicationUtilsTest extends JUnit3Suite with ZooKeeperTestHarness { +class ReplicationUtilsTest extends ZooKeeperTestHarness { val topic = "my-topic-test" val partitionId = 0 val brokerId = 1 @@ -45,7 +44,7 @@ class ReplicationUtilsTest extends JUnit3Suite with ZooKeeperTestHarness { val topicDataLeaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(1,leaderEpoch,List(1,2),0), controllerEpoch) - + @Before override def setUp() { super.setUp() ZkUtils.createPersistentPath(zkClient,topicPath,topicData) diff --git a/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala b/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala index cfea63b..7c131fc 100644 --- a/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala +++ 
b/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import org.junit.{Test, After, Before} import kafka.utils.TestUtils.retry diff --git a/core/src/test/scala/unit/kafka/utils/TestUtils.scala b/core/src/test/scala/unit/kafka/utils/TestUtils.scala index 17e9fe4..a241473 100755 --- a/core/src/test/scala/unit/kafka/utils/TestUtils.scala +++ b/core/src/test/scala/unit/kafka/utils/TestUtils.scala @@ -43,8 +43,7 @@ import kafka.admin.AdminUtils import kafka.producer.ProducerConfig import kafka.log._ -import junit.framework.AssertionFailedError -import junit.framework.Assert._ +import org.junit.Assert._ import org.apache.kafka.clients.producer.KafkaProducer import scala.collection.Map @@ -594,7 +593,7 @@ object TestUtils extends Logging { block return } catch { - case e: AssertionFailedError => + case e: AssertionError => val ellapsed = System.currentTimeMillis - startTime if(ellapsed > maxWaitMs) { throw e diff --git a/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala b/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala index 052aecd..a018dde 100644 --- a/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala +++ b/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils.timer -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala b/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala index 8507592..95de378 100644 --- a/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala +++ b/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala @@ -18,7 +18,7 @@ package kafka.utils.timer import java.util.concurrent.{CountDownLatch, ExecutorService, Executors, TimeUnit} -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala b/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala index 2be1619..247aa6e 100644 --- a/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala +++ b/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala @@ -18,13 +18,11 @@ package kafka.zk import kafka.consumer.ConsumerConfig -import org.I0Itec.zkclient.ZkClient import kafka.utils.ZkUtils import kafka.utils.TestUtils import org.junit.Assert -import org.scalatest.junit.JUnit3Suite -class ZKEphemeralTest extends JUnit3Suite with ZooKeeperTestHarness { +class ZKEphemeralTest extends ZooKeeperTestHarness { var zkSessionTimeoutMs = 1000 def testEphemeralNodeCleanup = { diff --git a/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala b/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala index d3e44c6..35c635a 100644 --- a/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala +++ b/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala @@ -17,13 +17,12 @@ package kafka.zk -import junit.framework.Assert import kafka.consumer.ConsumerConfig import kafka.utils.{ZkPath, TestUtils, ZkUtils} import org.apache.kafka.common.config.ConfigException -import org.scalatest.junit.JUnit3Suite +import org.junit.Assert._ -class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { +class ZKPathTest extends ZooKeeperTestHarness { val path: String = "/some_dir" val zkSessionTimeoutMs = 1000 @@ -54,7 +53,7 @@ 
class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) } def testMakeSurePersistsPathExistsThrowsException { @@ -82,7 +81,7 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) } def testCreateEphemeralPathThrowsException { @@ -110,7 +109,7 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create ephemeral path") } - Assert.assertTrue("Failed to create ephemeral path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create ephemeral path", ZkUtils.pathExists(zkClient, path)) } def testCreatePersistentSequentialThrowsException { @@ -140,6 +139,6 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, actualPath)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, actualPath)) } } diff --git a/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala b/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala index 1f4d10d..e4bfb48 100755 --- a/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala +++ b/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala @@ -17,11 +17,12 @@ package kafka.zk -import org.scalatest.junit.JUnit3Suite import org.I0Itec.zkclient.ZkClient import kafka.utils.{ZkUtils, CoreUtils} +import org.junit.{After, Before} +import org.scalatest.junit.JUnitSuite -trait ZooKeeperTestHarness extends JUnit3Suite { +trait ZooKeeperTestHarness extends JUnitSuite { var zkPort: Int = -1 var zookeeper: EmbeddedZookeeper = null var zkClient: ZkClient = null @@ -30,17 +31,17 @@ trait ZooKeeperTestHarness extends JUnit3Suite { def zkConnect: String = "127.0.0.1:" + zkPort - override def setUp() { - super.setUp + @Before + def setUp() { zookeeper = new EmbeddedZookeeper() zkPort = zookeeper.port zkClient = ZkUtils.createZkClient(zkConnect, zkSessionTimeout, zkConnectionTimeout) } - override def tearDown() { + @After + def tearDown() { CoreUtils.swallow(zkClient.close()) CoreUtils.swallow(zookeeper.shutdown()) - super.tearDown } } -- 2.4.5
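
Note on the recurring pattern in the hunks above: standalone suites drop the JUnit3Suite parent, swap junit.framework.Assert._ for org.junit.Assert._, and mark lifecycle methods with @Before/@After instead of relying on JUnit 3's name-based setUp/tearDown dispatch. A minimal sketch of the resulting JUnit 4 style follows; the class, field, and test names are hypothetical, not taken from the patch.

    import org.junit.Assert._
    import org.junit.{After, Before, Test}

    // Hypothetical standalone suite in the post-patch style: no JUnit3Suite
    // parent, lifecycle methods discovered via annotations rather than by name.
    class ExampleQueueTest {
      private var queue: java.util.ArrayDeque[Int] = null

      @Before
      def setUp() {
        queue = new java.util.ArrayDeque[Int]()
      }

      @After
      def tearDown() {
        queue.clear()
      }

      @Test
      def testOfferAndSize() {
        queue.add(42)
        assertEquals(1, queue.size)
      }
    }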
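
The shared harness traits (ZooKeeperTestHarness, KafkaServerTestHarness, IntegrationTestHarness) take a slightly different shape: the trait now extends ScalaTest's JUnitSuite and annotates its own setUp/tearDown, and every concrete test that overrides them re-annotates the override and chains to super, because JUnit 4 does not carry @Before/@After over to an overriding method. A hedged sketch of that shape, with made-up names, assuming junit and scalatest on the test classpath:

    import org.junit.Assert._
    import org.junit.{After, Before, Test}
    import org.scalatest.junit.JUnitSuite

    // Hypothetical harness trait mirroring the ZooKeeperTestHarness change.
    trait FakeServerTestHarness extends JUnitSuite {
      var server: String = null   // stands in for a real embedded server

      @Before
      def setUp() {
        server = "started"
      }

      @After
      def tearDown() {
        server = null
      }
    }

    // Concrete tests re-annotate their overrides and call super explicitly,
    // as ConsumerBounceTest, LeaderElectionTest and ProducerTest do above.
    class FakeServerTest extends FakeServerTestHarness {
      var topicReady: Boolean = false

      @Before
      override def setUp() {
        super.setUp()
        topicReady = true
      }

      @After
      override def tearDown() {
        topicReady = false
        super.tearDown()
      }

      @Test
      def testHarnessIsUp() {
        assertTrue(topicReady)
        assertEquals("started", server)
      }
    }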
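
One behavioural detail from the TestUtils hunk: org.junit.Assert reports failures as java.lang.AssertionError rather than junit.framework.AssertionFailedError, so the retry helper's catch clause had to change or retries would stop retrying. The sketch below shows that retry shape only; the object name, timing values, and main method are illustrative, not the project's exact code.

    import org.junit.Assert._

    object RetrySketch {
      // Re-run `block` until it stops throwing AssertionError or maxWaitMs
      // elapses. Catching AssertionError is what keeps retries working with
      // org.junit.Assert; junit.framework.AssertionFailedError is no longer thrown.
      def retry(maxWaitMs: Long)(block: => Unit) {
        val startTime = System.currentTimeMillis
        while (true) {
          try {
            block
            return
          } catch {
            case e: AssertionError =>
              if (System.currentTimeMillis - startTime > maxWaitMs)
                throw e
              Thread.sleep(100)
          }
        }
      }

      def main(args: Array[String]) {
        var attempts = 0
        retry(maxWaitMs = 2000) {
          attempts += 1
          assertTrue("needs a few attempts", attempts >= 3)
        }
        println("succeeded after " + attempts + " attempts")
      }
    }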