From 234b71440ed24a8c92f59fbf21fae63d79d81516 Mon Sep 17 00:00:00 2001 From: Alexander Pakulov Date: Thu, 18 Jun 2015 11:01:14 -0700 Subject: [PATCH] KAFKA-1782; Junit3 Misusage --- .../integration/kafka/api/ConsumerBounceTest.scala | 2 ++ .../scala/integration/kafka/api/ConsumerTest.scala | 5 ++- .../kafka/api/IntegrationTestHarness.scala | 5 ++- .../integration/kafka/api/ProducerBounceTest.scala | 4 ++- .../kafka/api/ProducerCompressionTest.scala | 3 +- .../kafka/api/ProducerFailureHandlingTest.scala | 13 ++++--- .../integration/kafka/api/ProducerSendTest.scala | 13 +++---- .../test/scala/unit/kafka/KafkaConfigTest.scala | 2 +- .../scala/unit/kafka/admin/AddPartitionsTest.scala | 8 +++-- .../test/scala/unit/kafka/admin/AdminTest.scala | 7 ++-- .../unit/kafka/admin/DeleteConsumerGroupTest.scala | 3 +- .../scala/unit/kafka/admin/DeleteTopicTest.scala | 5 ++- .../scala/unit/kafka/admin/TopicCommandTest.scala | 8 ++--- .../test/scala/unit/kafka/api/ApiUtilsTest.scala | 2 +- .../api/RequestResponseSerializationTest.scala | 2 +- .../unit/kafka/cluster/BrokerEndPointTest.scala | 3 +- .../test/scala/unit/kafka/common/ConfigTest.scala | 2 +- .../test/scala/unit/kafka/common/TopicTest.scala | 2 +- .../unit/kafka/consumer/ConsumerIteratorTest.scala | 9 +++-- .../kafka/consumer/PartitionAssignorTest.scala | 5 ++- .../unit/kafka/consumer/TopicFilterTest.scala | 2 +- .../consumer/ZookeeperConsumerConnectorTest.scala | 8 +++-- .../coordinator/ConsumerGroupMetadataTest.scala | 2 +- .../coordinator/CoordinatorMetadataTest.scala | 2 +- .../kafka/coordinator/PartitionAssignorTest.scala | 2 +- .../kafka/integration/AutoOffsetResetTest.scala | 9 ++--- .../scala/unit/kafka/integration/FetcherTest.scala | 11 +++--- .../kafka/integration/KafkaServerTestHarness.scala | 8 +++-- .../unit/kafka/integration/MinIsrConfigTest.scala | 3 +- .../unit/kafka/integration/PrimitiveApiTest.scala | 5 ++- .../integration/ProducerConsumerTestHarness.scala | 40 ++++++++++++---------- .../unit/kafka/integration/RollingBounceTest.scala | 8 +++-- .../unit/kafka/integration/TopicMetadataTest.scala | 8 +++-- .../integration/UncleanLeaderElectionTest.scala | 10 +++--- .../consumer/ZookeeperConsumerConnectorTest.scala | 6 ++-- .../javaapi/message/BaseMessageSetTestCases.scala | 2 +- .../javaapi/message/ByteBufferMessageSetTest.scala | 2 +- .../test/scala/unit/kafka/log/CleanerTest.scala | 2 +- .../scala/unit/kafka/log/FileMessageSetTest.scala | 2 +- .../unit/kafka/log/LogCleanerIntegrationTest.scala | 4 +-- .../test/scala/unit/kafka/log/LogConfigTest.scala | 4 +-- .../test/scala/unit/kafka/log/LogManagerTest.scala | 17 +++++---- .../test/scala/unit/kafka/log/LogSegmentTest.scala | 8 ++--- core/src/test/scala/unit/kafka/log/LogTest.scala | 2 +- .../scala/unit/kafka/log/OffsetIndexTest.scala | 2 +- .../test/scala/unit/kafka/log/OffsetMapTest.scala | 2 +- .../unit/kafka/log4j/KafkaLog4jAppenderTest.scala | 5 ++- .../kafka/message/BaseMessageSetTestCases.scala | 2 +- .../kafka/message/ByteBufferMessageSetTest.scala | 2 +- .../kafka/message/MessageCompressionTest.scala | 2 +- .../scala/unit/kafka/message/MessageTest.scala | 2 +- .../unit/kafka/message/MessageWriterTest.scala | 2 +- .../scala/unit/kafka/metrics/KafkaTimerTest.scala | 5 ++- .../scala/unit/kafka/metrics/MetricsTest.scala | 6 ++-- .../unit/kafka/network/SocketServerTest.scala | 10 +++--- .../unit/kafka/producer/AsyncProducerTest.scala | 15 ++------ .../scala/unit/kafka/producer/ProducerTest.scala | 27 +++++++-------- .../unit/kafka/producer/SyncProducerTest.scala | 5 ++- 
.../unit/kafka/server/AdvertiseBrokerTest.scala | 8 +++-- .../unit/kafka/server/DelayedOperationTest.scala | 15 ++++---- .../kafka/server/DynamicConfigChangeTest.scala | 5 ++- .../server/HighwatermarkPersistenceTest.scala | 3 +- .../unit/kafka/server/ISRExpirationTest.scala | 12 +++---- .../kafka/server/KafkaConfigConfigDefTest.scala | 4 +-- .../scala/unit/kafka/server/KafkaConfigTest.scala | 6 ++-- .../unit/kafka/server/LeaderElectionTest.scala | 8 +++-- .../scala/unit/kafka/server/LogOffsetTest.scala | 5 ++- .../scala/unit/kafka/server/LogRecoveryTest.scala | 6 ++-- .../scala/unit/kafka/server/OffsetCommitTest.scala | 5 ++- .../scala/unit/kafka/server/ReplicaFetchTest.scala | 6 ++-- .../unit/kafka/server/ReplicaManagerTest.scala | 5 ++- .../kafka/server/ServerGenerateBrokerIdTest.scala | 8 ++--- .../unit/kafka/server/ServerShutdownTest.scala | 14 ++++---- .../unit/kafka/server/ServerStartupTest.scala | 5 ++- .../scala/unit/kafka/server/SimpleFetchTest.scala | 16 ++++----- .../kafka/utils/ByteBoundedBlockingQueueTest.scala | 2 +- .../unit/kafka/utils/CommandLineUtilsTest.scala | 2 +- .../unit/kafka/utils/IteratorTemplateTest.scala | 2 +- .../src/test/scala/unit/kafka/utils/JsonTest.scala | 2 +- .../unit/kafka/utils/ReplicationUtilsTest.scala | 7 ++-- .../scala/unit/kafka/utils/SchedulerTest.scala | 2 +- .../test/scala/unit/kafka/utils/TestUtils.scala | 5 ++- .../unit/kafka/utils/timer/TimerTaskListTest.scala | 2 +- .../scala/unit/kafka/utils/timer/TimerTest.scala | 2 +- .../test/scala/unit/kafka/zk/ZKEphemeralTest.scala | 4 +-- core/src/test/scala/unit/kafka/zk/ZKPathTest.scala | 13 ++++--- .../scala/unit/kafka/zk/ZooKeeperTestHarness.scala | 12 +++---- 87 files changed, 266 insertions(+), 277 deletions(-) diff --git a/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala b/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala index f56096b..32be00c 100644 --- a/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala +++ b/core/src/test/scala/integration/kafka/api/ConsumerBounceTest.scala @@ -23,6 +23,7 @@ import org.apache.kafka.common.TopicPartition import kafka.utils.{ShutdownableThread, TestUtils, Logging} import org.junit.Assert._ +import org.junit.Before import scala.collection.JavaConversions._ @@ -55,6 +56,7 @@ class ConsumerBounceTest extends IntegrationTestHarness with Logging { .map(KafkaConfig.fromProps(_, serverConfig)) } + @Before override def setUp() { super.setUp() diff --git a/core/src/test/scala/integration/kafka/api/ConsumerTest.scala b/core/src/test/scala/integration/kafka/api/ConsumerTest.scala index 17b17b9..6d9acd5 100644 --- a/core/src/test/scala/integration/kafka/api/ConsumerTest.scala +++ b/core/src/test/scala/integration/kafka/api/ConsumerTest.scala @@ -29,6 +29,8 @@ import kafka.server.{KafkaConfig, OffsetManager} import java.util.ArrayList import org.junit.Assert._ +import org.junit.Before +import org.scalatest.Assertions._ import scala.collection.JavaConversions._ @@ -55,7 +57,8 @@ class ConsumerTest extends IntegrationTestHarness with Logging { this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my-test") this.consumerConfig.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString) this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") - + + @Before override def setUp() { super.setUp() diff --git a/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala b/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala index 07b1ff4..316d6ba 
100644 --- a/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala +++ b/core/src/test/scala/integration/kafka/api/IntegrationTestHarness.scala @@ -25,6 +25,7 @@ import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.clients.producer.KafkaProducer import kafka.server.{OffsetManager, KafkaConfig} import kafka.integration.KafkaServerTestHarness +import org.junit.{After, Before} import scala.collection.mutable.Buffer /** @@ -48,6 +49,7 @@ trait IntegrationTestHarness extends KafkaServerTestHarness { cfgs.map(KafkaConfig.fromProps) } + @Before override def setUp() { super.setUp() producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapUrl) @@ -69,7 +71,8 @@ trait IntegrationTestHarness extends KafkaServerTestHarness { servers, servers(0).offsetManager.offsetsTopicConfig) } - + + @After override def tearDown() { producers.foreach(_.close()) consumers.foreach(_.close()) diff --git a/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala b/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala index ce70a0a..2dbb9dc 100644 --- a/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala @@ -22,7 +22,7 @@ import kafka.utils.{ShutdownableThread, TestUtils} import org.apache.kafka.clients.producer._ import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback import org.junit.Assert._ -import org.junit.Test +import org.junit.{After, Before, Test} class ProducerBounceTest extends KafkaServerTestHarness { private val producerBufferSize = 30000 @@ -62,6 +62,7 @@ class ProducerBounceTest extends KafkaServerTestHarness { private val topic1 = "topic-1" private val topic2 = "topic-2" + @Before override def setUp() { super.setUp() @@ -70,6 +71,7 @@ class ProducerBounceTest extends KafkaServerTestHarness { producer3 = TestUtils.createNewProducer(brokerList, acks = -1, blockOnBufferFull = false, bufferSize = producerBufferSize) } + @After override def tearDown() { if (producer1 != null) producer1.close if (producer2 != null) producer2.close diff --git a/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala b/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala index 83de81c..87db255 100755 --- a/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerCompressionTest.scala @@ -19,7 +19,6 @@ package kafka.api.test import java.util.{Properties, Collection, ArrayList} -import org.scalatest.junit.JUnit3Suite import org.junit.runners.Parameterized import org.junit.runner.RunWith import org.junit.runners.Parameterized.Parameters @@ -36,7 +35,7 @@ import kafka.utils.{CoreUtils, TestUtils} @RunWith(value = classOf[Parameterized]) -class ProducerCompressionTest(compression: String) extends JUnit3Suite with ZooKeeperTestHarness { +class ProducerCompressionTest(compression: String) extends ZooKeeperTestHarness { private val brokerId = 0 private var server: KafkaServer = null diff --git a/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala b/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala index ee94011..426ce11 100644 --- a/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala @@ -17,8 +17,9 @@ package kafka.api -import org.junit.Test +import org.junit.{After, Before, Assert, Test} import 
org.junit.Assert._ +import org.scalatest.Assertions._ import java.util.{Properties, Random} import java.util.concurrent.{TimeoutException, TimeUnit, ExecutionException} @@ -61,6 +62,7 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { private val topic1 = "topic-1" private val topic2 = "topic-2" + @Before override def setUp() { super.setUp() @@ -69,6 +71,7 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { producer3 = TestUtils.createNewProducer(brokerList, acks = -1, blockOnBufferFull = false, bufferSize = producerBufferSize) } + @After override def tearDown() { if (producer1 != null) producer1.close if (producer2 != null) producer2.close @@ -257,11 +260,11 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { val record = new ProducerRecord[Array[Byte],Array[Byte]](topicName, null, "key".getBytes, "value".getBytes) try { producer3.send(record).get - fail("Expected exception when producing to topic with fewer brokers than min.insync.replicas") + Assert.fail("Expected exception when producing to topic with fewer brokers than min.insync.replicas") } catch { case e: ExecutionException => if (!e.getCause.isInstanceOf[NotEnoughReplicasException]) { - fail("Expected NotEnoughReplicasException when producing to topic with fewer brokers than min.insync.replicas") + Assert.fail("Expected NotEnoughReplicasException when producing to topic with fewer brokers than min.insync.replicas") } } } @@ -283,12 +286,12 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { servers.head.awaitShutdown() try { producer3.send(record).get - fail("Expected exception when producing to topic with fewer brokers than min.insync.replicas") + Assert.fail("Expected exception when producing to topic with fewer brokers than min.insync.replicas") } catch { case e: ExecutionException => if (!e.getCause.isInstanceOf[NotEnoughReplicasException] && !e.getCause.isInstanceOf[NotEnoughReplicasAfterAppendException]) { - fail("Expected NotEnoughReplicasException or NotEnoughReplicasAfterAppendException when producing to topic " + + Assert.fail("Expected NotEnoughReplicasException or NotEnoughReplicasAfterAppendException when producing to topic " + "with fewer brokers than min.insync.replicas, but saw " + e.getCause) } } diff --git a/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala b/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala index 9ce4bd5..398ae0d 100644 --- a/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala +++ b/core/src/test/scala/integration/kafka/api/ProducerSendTest.scala @@ -29,12 +29,11 @@ import org.apache.kafka.clients.producer._ import org.apache.kafka.common.config.ConfigException import org.apache.kafka.common.errors.SerializationException import org.apache.kafka.common.serialization.ByteArraySerializer +import org.junit.{Before, After, Test} import org.junit.Assert._ -import org.junit.Test -import org.scalatest.junit.JUnit3Suite +import org.scalatest.Assertions - -class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { +class ProducerSendTest extends KafkaServerTestHarness { val numServers = 2 val overridingProps = new Properties() @@ -49,6 +48,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { private val topic = "topic" private val numRecords = 100 + @Before override def setUp() { super.setUp() @@ -57,6 +57,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { consumer2 = new SimpleConsumer("localhost", servers(1).boundPort(), 
100, 1024*1024, "") } + @After override def tearDown() { consumer1.close() consumer2.close() @@ -84,7 +85,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { assertEquals(partition, metadata.partition()) offset += 1 } else { - fail("Send callback returns the following exception", exception) + Assertions.fail("Send callback returns the following exception", exception) } } } @@ -116,7 +117,7 @@ class ProducerSendTest extends JUnit3Suite with KafkaServerTestHarness { fail("Should not allow sending a record without topic") } catch { case iae: IllegalArgumentException => // this is ok - case e: Throwable => fail("Only expecting IllegalArgumentException", e) + case e: Throwable => Assertions.fail("Only expecting IllegalArgumentException", e) } // non-blocking send a list of records diff --git a/core/src/test/scala/unit/kafka/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/KafkaConfigTest.scala index 4764c89..1233104 100644 --- a/core/src/test/scala/unit/kafka/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/KafkaConfigTest.scala @@ -21,7 +21,7 @@ import java.security.Permission import kafka.server.KafkaConfig import org.junit.{After, Before, Test} -import junit.framework.Assert._ +import org.junit.Assert._ class KafkaTest { diff --git a/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala b/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala index df5c6ba..42d655f 100755 --- a/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala +++ b/core/src/test/scala/unit/kafka/admin/AddPartitionsTest.scala @@ -17,17 +17,17 @@ package kafka.admin +import org.junit.Assert._ import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ -import junit.framework.Assert._ import kafka.utils.{ZkUtils, CoreUtils, TestUtils} import kafka.cluster.Broker import kafka.client.ClientUtils import kafka.server.{KafkaConfig, KafkaServer} +import org.junit.{After, Before} -class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { +class AddPartitionsTest extends ZooKeeperTestHarness { var configs: Seq[KafkaConfig] = null var servers: Seq[KafkaServer] = Seq.empty[KafkaServer] var brokers: Seq[Broker] = Seq.empty[Broker] @@ -39,6 +39,7 @@ class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { val topic3 = "new-topic3" val topic4 = "new-topic4" + @Before override def setUp() { super.setUp() @@ -54,6 +55,7 @@ class AddPartitionsTest extends JUnit3Suite with ZooKeeperTestHarness { createTopic(zkClient, topic4, partitionReplicaAssignment = Map(0->Seq(0,3)), servers = servers) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/admin/AdminTest.scala b/core/src/test/scala/unit/kafka/admin/AdminTest.scala index efb2f8e..73821ce 100755 --- a/core/src/test/scala/unit/kafka/admin/AdminTest.scala +++ b/core/src/test/scala/unit/kafka/admin/AdminTest.scala @@ -16,9 +16,7 @@ */ package kafka.admin -import junit.framework.Assert._ import org.junit.Test -import org.scalatest.junit.JUnit3Suite import java.util.Properties import kafka.utils._ import kafka.log._ @@ -28,9 +26,10 @@ import kafka.common.{TopicExistsException, TopicAndPartition} import kafka.server.{KafkaServer, KafkaConfig} import java.io.File import TestUtils._ +import org.junit.Assert._ +import org.scalatest.Assertions._ - -class AdminTest extends JUnit3Suite 
with ZooKeeperTestHarness with Logging { +class AdminTest extends ZooKeeperTestHarness with Logging { @Test def testReplicaAssignment() { diff --git a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala index 1913ad6..d3abf08 100644 --- a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala +++ b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala @@ -16,7 +16,6 @@ */ package kafka.admin -import org.scalatest.junit.JUnit3Suite import kafka.utils._ import kafka.server.KafkaConfig import org.junit.Test @@ -25,7 +24,7 @@ import org.apache.kafka.clients.producer.{ProducerRecord, KafkaProducer} import kafka.integration.KafkaServerTestHarness -class DeleteConsumerGroupTest extends JUnit3Suite with KafkaServerTestHarness { +class DeleteConsumerGroupTest extends KafkaServerTestHarness { def generateConfigs() = TestUtils.createBrokerConfigs(3, zkConnect, false, true).map(KafkaConfig.fromProps) @Test diff --git a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala index fa8ce25..6279180 100644 --- a/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala +++ b/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala @@ -17,16 +17,15 @@ package kafka.admin import kafka.log.Log -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness -import junit.framework.Assert._ import kafka.utils.{ZkUtils, TestUtils} import kafka.server.{KafkaServer, KafkaConfig} import org.junit.Test +import org.junit.Assert._ import java.util.Properties import kafka.common.TopicAndPartition -class DeleteTopicTest extends JUnit3Suite with ZooKeeperTestHarness { +class DeleteTopicTest extends ZooKeeperTestHarness { @Test def testDeleteTopicWithAllAliveReplicas() { diff --git a/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala b/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala index c7136f2..d1b4673 100644 --- a/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala +++ b/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala @@ -16,17 +16,17 @@ */ package kafka.admin -import junit.framework.Assert._ import org.junit.Test -import org.scalatest.junit.JUnit3Suite import kafka.utils.Logging import kafka.utils.TestUtils import kafka.zk.ZooKeeperTestHarness -import kafka.server.{OffsetManager, KafkaConfig} +import kafka.server.{OffsetManager} import kafka.admin.TopicCommand.TopicCommandOptions import kafka.utils.ZkUtils +import org.junit.Assert._ +import org.scalatest.Assertions._ -class TopicCommandTest extends JUnit3Suite with ZooKeeperTestHarness with Logging { +class TopicCommandTest extends ZooKeeperTestHarness with Logging { @Test def testConfigPreservationAcrossPartitionAlteration() { diff --git a/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala b/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala index 2554425..fff3e7b 100644 --- a/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/api/ApiUtilsTest.scala @@ -19,7 +19,7 @@ package kafka.api import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ import scala.util.Random import java.nio.ByteBuffer import kafka.common.KafkaException diff --git a/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala b/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala index 5717165..b4c2a22 100644 --- 
a/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala +++ b/core/src/test/scala/unit/kafka/api/RequestResponseSerializationTest.scala @@ -32,7 +32,7 @@ import java.nio.ByteBuffer import org.apache.kafka.common.protocol.SecurityProtocol import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ object SerializationTestUtils { diff --git a/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala b/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala index abe511f..2d3a9c3 100644 --- a/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala +++ b/core/src/test/scala/unit/kafka/cluster/BrokerEndPointTest.scala @@ -22,11 +22,10 @@ import java.nio.ByteBuffer import kafka.utils.Logging import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite import scala.collection.mutable -class BrokerEndPointTest extends JUnit3Suite with Logging { +class BrokerEndPointTest extends Logging { @Test def testSerDe() = { diff --git a/core/src/test/scala/unit/kafka/common/ConfigTest.scala b/core/src/test/scala/unit/kafka/common/ConfigTest.scala index 0aca938..a42836c 100644 --- a/core/src/test/scala/unit/kafka/common/ConfigTest.scala +++ b/core/src/test/scala/unit/kafka/common/ConfigTest.scala @@ -17,7 +17,7 @@ package kafka.common -import junit.framework.Assert._ +import org.junit.Assert._ import collection.mutable.ArrayBuffer import org.junit.Test import kafka.producer.ProducerConfig diff --git a/core/src/test/scala/unit/kafka/common/TopicTest.scala b/core/src/test/scala/unit/kafka/common/TopicTest.scala index 79532c8..137dcc4 100644 --- a/core/src/test/scala/unit/kafka/common/TopicTest.scala +++ b/core/src/test/scala/unit/kafka/common/TopicTest.scala @@ -17,7 +17,7 @@ package kafka.common -import junit.framework.Assert._ +import org.junit.Assert._ import collection.mutable.ArrayBuffer import org.junit.Test diff --git a/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala b/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala index db5302f..ca63c80 100755 --- a/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/ConsumerIteratorTest.scala @@ -18,22 +18,20 @@ package kafka.consumer -import java.util.Properties import java.util.concurrent._ import java.util.concurrent.atomic._ import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.message._ import kafka.server._ import kafka.utils.TestUtils._ import kafka.utils._ -import org.junit.Test +import org.junit.{Before, Test} import kafka.serializer._ -import org.scalatest.junit.JUnit3Suite import kafka.integration.KafkaServerTestHarness -class ConsumerIteratorTest extends JUnit3Suite with KafkaServerTestHarness { +class ConsumerIteratorTest extends KafkaServerTestHarness { val numNodes = 1 @@ -49,6 +47,7 @@ class ConsumerIteratorTest extends JUnit3Suite with KafkaServerTestHarness { def consumerConfig = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer0)) + @Before override def setUp() { super.setUp() topicInfos = configs.map(c => new PartitionTopicInfo(topic, diff --git a/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala b/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala index adf0801..c1071b8 100644 --- a/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala +++ 
b/core/src/test/scala/unit/kafka/consumer/PartitionAssignorTest.scala @@ -17,18 +17,17 @@ package kafka.consumer -import org.scalatest.junit.JUnit3Suite import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient import org.apache.zookeeper.data.Stat import kafka.utils.{TestUtils, Logging, ZkUtils, Json} -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.common.TopicAndPartition import kafka.consumer.PartitionAssignorTest.StaticSubscriptionInfo import kafka.consumer.PartitionAssignorTest.Scenario import kafka.consumer.PartitionAssignorTest.WildcardSubscriptionInfo -class PartitionAssignorTest extends JUnit3Suite with Logging { +class PartitionAssignorTest extends Logging { def testRoundRobinPartitionAssignor() { val assignor = new RoundRobinAssignor diff --git a/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala b/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala index 4f124af..db0ebfa 100644 --- a/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala @@ -18,7 +18,7 @@ package kafka.consumer -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.Test import kafka.server.OffsetManager diff --git a/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala b/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala index 359b0f5..c851e27 100644 --- a/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala +++ b/core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala @@ -19,7 +19,7 @@ package kafka.consumer import java.util.{Collections, Properties} -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.common.MessageStreamsExistException import kafka.integration.KafkaServerTestHarness import kafka.javaapi.consumer.ConsumerRebalanceListener @@ -30,11 +30,11 @@ import kafka.utils.TestUtils._ import kafka.utils._ import org.I0Itec.zkclient.ZkClient import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import scala.collection._ -class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class ZookeeperConsumerConnectorTest extends KafkaServerTestHarness with Logging { val RebalanceBackoffMs = 5000 var dirs : ZKGroupTopicDirs = null @@ -54,11 +54,13 @@ class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHar val consumer3 = "consumer3" val nMessages = 2 + @Before override def setUp() { super.setUp() dirs = new ZKGroupTopicDirs(group, topic) } + @After override def tearDown() { super.tearDown() } diff --git a/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala b/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala index b69c993..5d812c2 100644 --- a/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/ConsumerGroupMetadataTest.scala @@ -17,7 +17,7 @@ package kafka.coordinator -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.{Before, Test} import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala b/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala index 08854c5..8ffca13 100644 --- a/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala +++ 
b/core/src/test/scala/unit/kafka/coordinator/CoordinatorMetadataTest.scala @@ -20,7 +20,7 @@ package kafka.coordinator import kafka.server.KafkaConfig import kafka.utils.{ZkUtils, TestUtils} -import junit.framework.Assert._ +import org.junit.Assert._ import org.I0Itec.zkclient.{IZkDataListener, ZkClient} import org.apache.zookeeper.data.Stat import org.easymock.EasyMock diff --git a/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala b/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala index 887cee5..79c691f 100644 --- a/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala +++ b/core/src/test/scala/unit/kafka/coordinator/PartitionAssignorTest.scala @@ -19,7 +19,7 @@ package kafka.coordinator import kafka.common.TopicAndPartition -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala b/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala index 139dc9a..818673f 100644 --- a/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala +++ b/core/src/test/scala/unit/kafka/integration/AutoOffsetResetTest.scala @@ -24,12 +24,11 @@ import kafka.utils.TestUtils import kafka.serializer._ import kafka.producer.{Producer, KeyedMessage} -import org.junit.Test +import org.junit.{After, Before, Test} import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.Assert._ -class AutoOffsetResetTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class AutoOffsetResetTest extends KafkaServerTestHarness with Logging { def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) @@ -42,12 +41,14 @@ class AutoOffsetResetTest extends JUnit3Suite with KafkaServerTestHarness with L val requestHandlerLogger = Logger.getLogger(classOf[kafka.server.KafkaRequestHandler]) + @Before override def setUp() { super.setUp() // temporarily set request handler logger to a higher level requestHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { // restore set request handler logger to a higher level requestHandlerLogger.setLevel(Level.ERROR) diff --git a/core/src/test/scala/unit/kafka/integration/FetcherTest.scala b/core/src/test/scala/unit/kafka/integration/FetcherTest.scala index facebd8..92af0a1 100644 --- a/core/src/test/scala/unit/kafka/integration/FetcherTest.scala +++ b/core/src/test/scala/unit/kafka/integration/FetcherTest.scala @@ -19,18 +19,17 @@ package kafka.integration import java.util.concurrent._ import java.util.concurrent.atomic._ +import org.junit.{After, Before} + import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.cluster._ import kafka.server._ -import org.scalatest.junit.JUnit3Suite import kafka.consumer._ -import kafka.serializer._ -import kafka.producer.{KeyedMessage, Producer} import kafka.utils.TestUtils -class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { +class FetcherTest extends KafkaServerTestHarness { val numNodes = 1 def generateConfigs() = TestUtils.createBrokerConfigs(numNodes, zkConnect).map(KafkaConfig.fromProps) @@ -40,6 +39,7 @@ class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { var fetcher: ConsumerFetcherManager = null + @Before override def setUp() { super.setUp TestUtils.createTopic(zkClient, topic, partitionReplicaAssignment = Map(0 -> 
Seq(configs.head.brokerId)), servers = servers) @@ -59,6 +59,7 @@ class FetcherTest extends JUnit3Suite with KafkaServerTestHarness { fetcher.startConnections(topicInfos, cluster) } + @After override def tearDown() { fetcher.stopConnections() super.tearDown diff --git a/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala b/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala index 87c6315..c7e79ca 100755 --- a/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala +++ b/core/src/test/scala/unit/kafka/integration/KafkaServerTestHarness.scala @@ -19,17 +19,18 @@ package kafka.integration import java.util.Arrays +import org.junit.{Before, After} + import scala.collection.mutable.Buffer import kafka.server._ import kafka.utils.{CoreUtils, TestUtils} -import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness import kafka.common.KafkaException /** * A test harness that brings up some number of broker nodes */ -trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { +trait KafkaServerTestHarness extends ZooKeeperTestHarness { var instanceConfigs: Seq[KafkaConfig] = null var servers: Buffer[KafkaServer] = null var brokerList: String = null @@ -51,7 +52,7 @@ trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { def bootstrapUrl = servers.map(s => s.config.hostName + ":" + s.boundPort()).mkString(",") - + @Before override def setUp() { super.setUp if(configs.size <= 0) @@ -62,6 +63,7 @@ trait KafkaServerTestHarness extends JUnit3Suite with ZooKeeperTestHarness { Arrays.fill(alive, true) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(_.config.logDirs.foreach(CoreUtils.rm(_))) diff --git a/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala b/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala index a2c9713..3c1cade 100644 --- a/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala +++ b/core/src/test/scala/unit/kafka/integration/MinIsrConfigTest.scala @@ -21,9 +21,8 @@ import java.util.Properties import kafka.server.KafkaConfig import kafka.utils.TestUtils -import org.scalatest.junit.JUnit3Suite -class MinIsrConfigTest extends JUnit3Suite with KafkaServerTestHarness { +class MinIsrConfigTest extends KafkaServerTestHarness { val overridingProps = new Properties() overridingProps.put(KafkaConfig.MinInSyncReplicasProp, "5") diff --git a/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala b/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala index 6a758a7..e05d16b 100755 --- a/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala +++ b/core/src/test/scala/unit/kafka/integration/PrimitiveApiTest.scala @@ -18,13 +18,12 @@ package kafka.integration import java.nio.ByteBuffer -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.api.{PartitionFetchInfo, FetchRequest, FetchRequestBuilder} import kafka.server.{KafkaRequestHandler, KafkaConfig} import kafka.producer.{KeyedMessage, Producer} import org.apache.log4j.{Level, Logger} import kafka.zk.ZooKeeperTestHarness -import org.scalatest.junit.JUnit3Suite import scala.collection._ import kafka.common.{TopicAndPartition, ErrorMapping, UnknownTopicOrPartitionException, OffsetOutOfRangeException} import kafka.utils.{StaticPartitioner, TestUtils, CoreUtils} @@ -34,7 +33,7 @@ import java.util.Properties /** * End to end tests of the primitive apis against a local server */ -class PrimitiveApiTest extends JUnit3Suite 
with ProducerConsumerTestHarness with ZooKeeperTestHarness { +class PrimitiveApiTest extends ProducerConsumerTestHarness with ZooKeeperTestHarness { val requestHandlerLogger = Logger.getLogger(classOf[KafkaRequestHandler]) def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) diff --git a/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala b/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala index 4614a92..cc5954d 100644 --- a/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala +++ b/core/src/test/scala/unit/kafka/integration/ProducerConsumerTestHarness.scala @@ -5,8 +5,8 @@ * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,28 +18,30 @@ package kafka.integration import kafka.consumer.SimpleConsumer -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.producer.Producer import kafka.utils.{StaticPartitioner, TestUtils} import kafka.serializer.StringEncoder -trait ProducerConsumerTestHarness extends JUnit3Suite with KafkaServerTestHarness { - val host = "localhost" - var producer: Producer[String, String] = null - var consumer: SimpleConsumer = null +trait ProducerConsumerTestHarness extends KafkaServerTestHarness { + val host = "localhost" + var producer: Producer[String, String] = null + var consumer: SimpleConsumer = null + @Before override def setUp() { - super.setUp - producer = TestUtils.createProducer[String, String](TestUtils.getBrokerListStrFromServers(servers), - encoder = classOf[StringEncoder].getName, - keyEncoder = classOf[StringEncoder].getName, - partitioner = classOf[StaticPartitioner].getName) - consumer = new SimpleConsumer(host, servers(0).boundPort(), 1000000, 64*1024, "") - } + super.setUp + producer = TestUtils.createProducer[String, String](TestUtils.getBrokerListStrFromServers(servers), + encoder = classOf[StringEncoder].getName, + keyEncoder = classOf[StringEncoder].getName, + partitioner = classOf[StaticPartitioner].getName) + consumer = new SimpleConsumer(host, servers(0).boundPort(), 1000000, 64 * 1024, "") + } - override def tearDown() { - producer.close() - consumer.close() - super.tearDown - } + @After + override def tearDown() { + producer.close() + consumer.close() + super.tearDown + } } diff --git a/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala b/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala index 12d0733..2fd10d8 100755 --- a/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala +++ b/core/src/test/scala/unit/kafka/integration/RollingBounceTest.scala @@ -17,18 +17,19 @@ package kafka.integration -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.{CoreUtils, TestUtils} import kafka.server.{KafkaConfig, KafkaServer} -class RollingBounceTest extends JUnit3Suite with ZooKeeperTestHarness { +class RollingBounceTest extends ZooKeeperTestHarness { val partitionId = 0 var servers: Seq[KafkaServer] = null 
+ @Before override def setUp() { super.setUp() // controlled.shutdown.enable is true by default @@ -39,6 +40,7 @@ class RollingBounceTest extends JUnit3Suite with ZooKeeperTestHarness { servers = configs.map(c => TestUtils.createServer(KafkaConfig.fromProps(c))) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala b/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala index 995b059..4c339bd 100644 --- a/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala +++ b/core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala @@ -18,11 +18,11 @@ package kafka.integration import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.zk.ZooKeeperTestHarness import kafka.admin.AdminUtils import java.nio.ByteBuffer -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.cluster.{BrokerEndPoint, Broker} import kafka.utils.TestUtils import kafka.utils.TestUtils._ @@ -31,10 +31,11 @@ import kafka.api.TopicMetadataRequest import kafka.common.ErrorMapping import kafka.client.ClientUtils -class TopicMetadataTest extends JUnit3Suite with ZooKeeperTestHarness { +class TopicMetadataTest extends ZooKeeperTestHarness { private var server1: KafkaServer = null var brokerEndPoints: Seq[BrokerEndPoint] = null + @Before override def setUp() { super.setUp() val props = createBrokerConfigs(1, zkConnect) @@ -43,6 +44,7 @@ class TopicMetadataTest extends JUnit3Suite with ZooKeeperTestHarness { brokerEndPoints = Seq(new Broker(server1.config.brokerId, server1.config.hostName, server1.boundPort()).getBrokerEndPoint(SecurityProtocol.PLAINTEXT)) } + @After override def tearDown() { server1.shutdown() super.tearDown() diff --git a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala index e4bf2df..5a7c412 100755 --- a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala +++ b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala @@ -18,13 +18,11 @@ package kafka.integration import org.apache.kafka.common.config.ConfigException +import org.junit.{After, Before} -import scala.collection.mutable.MutableList import scala.util.Random import org.apache.log4j.{Level, Logger} -import org.scalatest.junit.JUnit3Suite import java.util.Properties -import junit.framework.Assert._ import kafka.admin.AdminUtils import kafka.common.FailedToSendMessageException import kafka.consumer.{Consumer, ConsumerConfig, ConsumerTimeoutException} @@ -34,8 +32,10 @@ import kafka.server.{KafkaConfig, KafkaServer} import kafka.utils.CoreUtils import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness +import org.junit.Assert._ +import org.scalatest.Assertions._ -class UncleanLeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { +class UncleanLeaderElectionTest extends ZooKeeperTestHarness { val brokerId1 = 0 val brokerId2 = 1 @@ -58,6 +58,7 @@ class UncleanLeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { val syncProducerLogger = Logger.getLogger(classOf[kafka.producer.SyncProducer]) val eventHandlerLogger = Logger.getLogger(classOf[kafka.producer.async.DefaultEventHandler[Object, Object]]) + @Before override def setUp() { super.setUp() @@ -77,6 +78,7 @@ class UncleanLeaderElectionTest 
extends JUnit3Suite with ZooKeeperTestHarness { eventHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { servers.foreach(server => shutdownServer(server)) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala b/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala index 74c761d..cf6b9a9 100644 --- a/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala +++ b/core/src/test/scala/unit/kafka/javaapi/consumer/ZookeeperConsumerConnectorTest.scala @@ -20,7 +20,6 @@ package kafka.javaapi.consumer import java.util.Properties import kafka.server._ -import kafka.message._ import kafka.serializer._ import kafka.integration.KafkaServerTestHarness import kafka.producer.KeyedMessage @@ -33,12 +32,11 @@ import kafka.common.MessageStreamsExistException import scala.collection.JavaConversions -import org.scalatest.junit.JUnit3Suite import org.apache.log4j.{Level, Logger} -import junit.framework.Assert._ +import org.junit.Assert._ -class ZookeeperConsumerConnectorTest extends JUnit3Suite with KafkaServerTestHarness with ZooKeeperTestHarness with Logging { +class ZookeeperConsumerConnectorTest extends KafkaServerTestHarness with ZooKeeperTestHarness with Logging { val numNodes = 2 val numParts = 2 val topic = "topic1" diff --git a/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala b/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala index 726399e..80f809e 100644 --- a/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala +++ b/core/src/test/scala/unit/kafka/javaapi/message/BaseMessageSetTestCases.scala @@ -17,7 +17,7 @@ package kafka.javaapi.message -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.Test import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala b/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala index 383fcef..fbdb000 100644 --- a/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/javaapi/message/ByteBufferMessageSetTest.scala @@ -17,7 +17,7 @@ package kafka.javaapi.message -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import kafka.message.{DefaultCompressionCodec, CompressionCodec, NoCompressionCodec, Message} diff --git a/core/src/test/scala/unit/kafka/log/CleanerTest.scala b/core/src/test/scala/unit/kafka/log/CleanerTest.scala index 8b8249a..ae9f38a 100755 --- a/core/src/test/scala/unit/kafka/log/CleanerTest.scala +++ b/core/src/test/scala/unit/kafka/log/CleanerTest.scala @@ -17,7 +17,7 @@ package kafka.log -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.{After, Test} import java.nio._ diff --git a/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala b/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala index cec1cae..255a669 100644 --- a/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/log/FileMessageSetTest.scala @@ -19,7 +19,7 @@ package kafka.log import java.nio._ import java.util.concurrent.atomic._ -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.TestUtils._ import kafka.message._ import org.junit.Test diff --git 
a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala index 471ddff..da80f6e 100755 --- a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala @@ -29,16 +29,14 @@ import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.runners.Parameterized.Parameters -import org.scalatest.junit.JUnit3Suite import scala.collection._ - /** * This is an integration test that tests the fully integrated log cleaner */ @RunWith(value = classOf[Parameterized]) -class LogCleanerIntegrationTest(compressionCodec: String) extends JUnit3Suite { +class LogCleanerIntegrationTest(compressionCodec: String) { val time = new MockTime() val segmentSize = 100 diff --git a/core/src/test/scala/unit/kafka/log/LogConfigTest.scala b/core/src/test/scala/unit/kafka/log/LogConfigTest.scala index 3fd5a53..0513df4 100644 --- a/core/src/test/scala/unit/kafka/log/LogConfigTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogConfigTest.scala @@ -21,9 +21,9 @@ import java.util.Properties import org.apache.kafka.common.config.ConfigException import org.junit.{Assert, Test} -import org.scalatest.junit.JUnit3Suite +import org.scalatest.Assertions._ -class LogConfigTest extends JUnit3Suite { +class LogConfigTest { @Test def testFromPropsDefaults() { diff --git a/core/src/test/scala/unit/kafka/log/LogManagerTest.scala b/core/src/test/scala/unit/kafka/log/LogManagerTest.scala index 01dfbc4..1e14f6e 100755 --- a/core/src/test/scala/unit/kafka/log/LogManagerTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogManagerTest.scala @@ -18,14 +18,13 @@ package kafka.log import java.io._ -import junit.framework.Assert._ -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import kafka.server.{BrokerState, OffsetCheckpoint} +import kafka.server.OffsetCheckpoint import kafka.common._ import kafka.utils._ +import org.junit.{After, Before, Test} +import org.junit.Assert._ -class LogManagerTest extends JUnit3Suite { +class LogManagerTest { val time: MockTime = new MockTime() val maxRollInterval = 100 @@ -36,20 +35,20 @@ class LogManagerTest extends JUnit3Suite { val name = "kafka" val veryLargeLogFlushInterval = 10000000L - override def setUp() { - super.setUp() + @Before + def setUp() { logDir = TestUtils.tempDir() logManager = createLogManager() logManager.startup logDir = logManager.logDirs(0) } - override def tearDown() { + @After + def tearDown() { if(logManager != null) logManager.shutdown() CoreUtils.rm(logDir) logManager.logDirs.foreach(CoreUtils.rm(_)) - super.tearDown() } /** diff --git a/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala b/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala index 03fb351..25eb2a0 100644 --- a/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogSegmentTest.scala @@ -16,19 +16,15 @@ */ package kafka.log -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ -import java.io.File -import java.io.RandomAccessFile -import java.util.Random import org.junit.{Test, After} -import org.scalatest.junit.JUnit3Suite import kafka.utils.TestUtils import kafka.message._ import kafka.utils.SystemTime import scala.collection._ -class LogSegmentTest extends JUnit3Suite { +class LogSegmentTest { val segments = mutable.ArrayBuffer[LogSegment]() diff --git a/core/src/test/scala/unit/kafka/log/LogTest.scala 
b/core/src/test/scala/unit/kafka/log/LogTest.scala index 8e095d6..e036ee8 100755 --- a/core/src/test/scala/unit/kafka/log/LogTest.scala +++ b/core/src/test/scala/unit/kafka/log/LogTest.scala @@ -19,7 +19,7 @@ package kafka.log import java.io._ import java.util.concurrent.atomic._ -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.{After, Before, Test} import kafka.message._ diff --git a/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala b/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala index 9213a5d..dfd7b54 100644 --- a/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala +++ b/core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala @@ -18,7 +18,7 @@ package kafka.log import java.io._ -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.{Collections, Arrays} import org.junit._ import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala b/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala index 12ce39e..f50daa4 100644 --- a/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala +++ b/core/src/test/scala/unit/kafka/log/OffsetMapTest.scala @@ -20,7 +20,7 @@ package kafka.log import java.nio._ import org.junit._ import org.scalatest.junit.JUnitSuite -import junit.framework.Assert._ +import org.junit.Assert._ class OffsetMapTest extends JUnitSuite { diff --git a/core/src/test/scala/unit/kafka/log4j/KafkaLog4jAppenderTest.scala b/core/src/test/scala/unit/kafka/log4j/KafkaLog4jAppenderTest.scala index 41366a1..b4c13ed 100755 --- a/core/src/test/scala/unit/kafka/log4j/KafkaLog4jAppenderTest.scala +++ b/core/src/test/scala/unit/kafka/log4j/KafkaLog4jAppenderTest.scala @@ -31,11 +31,10 @@ import java.io.File import org.apache.log4j.spi.LoggingEvent import org.apache.log4j.{PropertyConfigurator, Logger} import org.junit.{After, Before, Test} -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.Assert._ -class KafkaLog4jAppenderTest extends JUnit3Suite with ZooKeeperTestHarness with Logging { +class KafkaLog4jAppenderTest extends ZooKeeperTestHarness with Logging { var logDirZk: File = null var config: KafkaConfig = null diff --git a/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala b/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala index dd8847f..208994b 100644 --- a/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala +++ b/core/src/test/scala/unit/kafka/message/BaseMessageSetTestCases.scala @@ -18,7 +18,7 @@ package kafka.message import java.io.RandomAccessFile -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.TestUtils._ import kafka.log.FileMessageSet import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala b/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala index 07bc317..511060e 100644 --- a/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala +++ b/core/src/test/scala/unit/kafka/message/ByteBufferMessageSetTest.scala @@ -19,7 +19,7 @@ package kafka.message import java.nio._ import java.util.concurrent.atomic.AtomicLong -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala b/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala index 76987d4..f45bead 100644 --- 
a/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala @@ -21,7 +21,7 @@ import java.io.ByteArrayOutputStream import scala.collection._ import org.scalatest.junit.JUnitSuite import org.junit._ -import junit.framework.Assert._ +import org.junit.Assert._ class MessageCompressionTest extends JUnitSuite { diff --git a/core/src/test/scala/unit/kafka/message/MessageTest.scala b/core/src/test/scala/unit/kafka/message/MessageTest.scala index 11c0f81..3c12d13 100755 --- a/core/src/test/scala/unit/kafka/message/MessageTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageTest.scala @@ -20,7 +20,7 @@ package kafka.message import java.nio._ import java.util.HashMap import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.junit.JUnitSuite import org.junit.{Before, Test} import kafka.utils.TestUtils diff --git a/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala b/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala index b08a343..3993fdb 100644 --- a/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala +++ b/core/src/test/scala/unit/kafka/message/MessageWriterTest.scala @@ -20,7 +20,7 @@ package kafka.message import java.io.{InputStream, ByteArrayInputStream, ByteArrayOutputStream} import java.nio.ByteBuffer import java.util.Random -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import org.scalatest.junit.JUnitSuite diff --git a/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala b/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala index 7df7405..3b3e4c3 100644 --- a/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala +++ b/core/src/test/scala/unit/kafka/metrics/KafkaTimerTest.scala @@ -18,12 +18,11 @@ package kafka.metrics */ import org.junit.Test -import org.scalatest.junit.JUnit3Suite import java.util.concurrent.TimeUnit -import junit.framework.Assert._ +import org.junit.Assert._ import com.yammer.metrics.core.{MetricsRegistry, Clock} -class KafkaTimerTest extends JUnit3Suite { +class KafkaTimerTest { @Test def testKafkaTimer() { diff --git a/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala b/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala index b42101b..5a268d1 100644 --- a/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala +++ b/core/src/test/scala/unit/kafka/metrics/MetricsTest.scala @@ -22,10 +22,9 @@ import java.util.Properties import com.yammer.metrics.Metrics import com.yammer.metrics.core.MetricPredicate import org.junit.Test -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.integration.KafkaServerTestHarness import kafka.server._ -import kafka.message._ import kafka.serializer._ import kafka.utils._ import kafka.admin.AdminUtils @@ -33,9 +32,8 @@ import kafka.utils.TestUtils._ import scala.collection._ import scala.collection.JavaConversions._ import scala.util.matching.Regex -import org.scalatest.junit.JUnit3Suite -class MetricsTest extends JUnit3Suite with KafkaServerTestHarness with Logging { +class MetricsTest extends KafkaServerTestHarness with Logging { val numNodes = 2 val numParts = 2 val topic = "topic1" diff --git a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala index 7dc2fad..b97a6eb 100644 --- a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala +++ 
b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala @@ -25,16 +25,14 @@ import org.apache.kafka.common.network.NetworkSend import org.apache.kafka.common.protocol.SecurityProtocol import org.apache.kafka.common.utils.SystemTime import org.junit._ -import org.scalatest.junit.JUnitSuite import java.util.Random -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.producer.SyncProducerConfig import kafka.api.ProducerRequest import java.nio.ByteBuffer import kafka.common.TopicAndPartition import kafka.message.ByteBufferMessageSet -import java.nio.channels.SelectionKey -import kafka.utils.TestUtils +import org.scalatest.junit.JUnitSuite import scala.collection.Map class SocketServerTest extends JUnitSuite { @@ -84,11 +82,11 @@ class SocketServerTest extends JUnitSuite { new Socket("localhost", server.boundPort(protocol)) } - @After def cleanup() { server.shutdown() } + @Test def simpleRequest() { val plainSocket = connect(protocol = SecurityProtocol.PLAINTEXT) @@ -175,7 +173,7 @@ class SocketServerTest extends JUnitSuite { } @Test - def testMaxConnectionsPerIPOverrides(): Unit = { + def testMaxConnectionsPerIPOverrides() { val overrideNum = 6 val overrides: Map[String, Int] = Map("localhost" -> overrideNum) val overrideServer: SocketServer = new SocketServer(0, diff --git a/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala b/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala index be4bb87..b54f30e 100755 --- a/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala @@ -19,36 +19,27 @@ package kafka.producer import java.util.Properties import java.util.concurrent.LinkedBlockingQueue -import junit.framework.Assert._ +import org.junit.Assert._ import org.easymock.EasyMock import org.junit.Test import kafka.api._ -import kafka.cluster.{BrokerEndPoint, Broker} +import kafka.cluster.BrokerEndPoint import kafka.common._ import kafka.message._ import kafka.producer.async._ import kafka.serializer._ import kafka.server.KafkaConfig import kafka.utils.TestUtils._ -import org.scalatest.junit.JUnit3Suite import scala.collection.Map import scala.collection.mutable.ArrayBuffer import kafka.utils._ -class AsyncProducerTest extends JUnit3Suite { +class AsyncProducerTest { // One of the few cases we can just set a fixed port because the producer is mocked out here since this uses mocks val props = Seq(createBrokerConfig(1, "127.0.0.1:1", port=65534)) val configs = props.map(KafkaConfig.fromProps) val brokerList = configs.map(c => org.apache.kafka.common.utils.Utils.formatAddress(c.hostName, c.port)).mkString(",") - override def setUp() { - super.setUp() - } - - override def tearDown() { - super.tearDown() - } - @Test def testProducerQueueSize() { // a mock event handler that blocks diff --git a/core/src/test/scala/unit/kafka/producer/ProducerTest.scala b/core/src/test/scala/unit/kafka/producer/ProducerTest.scala index 4d2536b..a7267b9 100755 --- a/core/src/test/scala/unit/kafka/producer/ProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/ProducerTest.scala @@ -17,26 +17,23 @@ package kafka.producer -import org.scalatest.TestFailedException -import org.scalatest.junit.JUnit3Suite +import org.scalatest.{Assertions, TestFailedException} import kafka.consumer.SimpleConsumer import kafka.message.Message import kafka.server.{KafkaConfig, KafkaRequestHandler, KafkaServer} import kafka.zk.ZooKeeperTestHarness import org.apache.log4j.{Level, Logger} -import org.junit.Test 
+import org.junit.{After, Before, Test} import kafka.utils._ import java.util import kafka.admin.AdminUtils import util.Properties import kafka.api.FetchRequestBuilder -import org.junit.Assert.assertTrue -import org.junit.Assert.assertFalse -import org.junit.Assert.assertEquals import kafka.common.{ErrorMapping, FailedToSendMessageException} import kafka.serializer.StringEncoder +import org.junit.Assert._ -class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ +class ProducerTest extends ZooKeeperTestHarness with Logging{ private val brokerId1 = 0 private val brokerId2 = 1 private var server1: KafkaServer = null @@ -60,6 +57,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ consumer2 } + @Before override def setUp() { super.setUp() // set up 2 brokers with 4 partitions each @@ -81,6 +79,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ requestHandlerLogger.setLevel(Level.FATAL) } + @After override def tearDown() { // restore set request handler logger to a higher level requestHandlerLogger.setLevel(Level.ERROR) @@ -116,7 +115,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ fail("Test should fail because the broker list provided are not valid") } catch { case e: FailedToSendMessageException => // this is expected - case oe: Throwable => fail("fails with exception", oe) + case oe: Throwable => Assertions.fail("fails with exception", oe) } finally { producer1.close() } @@ -129,7 +128,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ try{ producer2.send(new KeyedMessage[String, String](topic, "test", "test1")) } catch { - case e: Throwable => fail("Should succeed sending the message", e) + case e: Throwable => Assertions.fail("Should succeed sending the message", e) } finally { producer2.close() } @@ -142,7 +141,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ try{ producer3.send(new KeyedMessage[String, String](topic, "test", "test1")) } catch { - case e: Throwable => fail("Should succeed sending the message", e) + case e: Throwable => Assertions.fail("Should succeed sending the message", e) } finally { producer3.close() } @@ -201,7 +200,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ } catch { case iae: IllegalArgumentException => // this is expected - case e: Throwable => fail("Not expected", e) + case e: Throwable => Assertions.fail("Not expected", e) } } @@ -261,7 +260,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ assertEquals(new Message(bytes = "test1".getBytes, key = "test".getBytes), messageSet1.next.message) assertFalse("Message set should have another message", messageSet1.hasNext) } catch { - case e: Exception => fail("Not expected", e) + case e: Exception => Assertions.fail("Not expected", e) } producer.close } @@ -295,7 +294,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ assertTrue("Message set should have 1 message", messageSet1.hasNext) assertEquals(new Message("test".getBytes), messageSet1.next.message) } catch { - case e: Throwable => case e: Exception => producer.close; fail("Not expected", e) + case e: Throwable => case e: Exception => producer.close; Assertions.fail("Not expected", e) } // stop IO threads and request handling, but leave networking operational @@ -309,7 +308,7 @@ class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{ 
producer.send(new KeyedMessage[String, String](topic, "test", "test")) } catch { case e: FailedToSendMessageException => /* success */ - case e: Exception => fail("Not expected", e) + case e: Exception => Assertions.fail("Not expected", e) } finally { producer.close() } diff --git a/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala b/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala index 8c3fb7a..90689f6 100644 --- a/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala +++ b/core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala @@ -20,7 +20,7 @@ package kafka.producer import java.net.SocketTimeoutException import java.util.Properties -import junit.framework.Assert +import org.junit.Assert import kafka.admin.AdminUtils import kafka.api.ProducerResponseStatus import kafka.common.{ErrorMapping, TopicAndPartition} @@ -30,9 +30,8 @@ import kafka.server.KafkaConfig import kafka.utils._ import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite -class SyncProducerTest extends JUnit3Suite with KafkaServerTestHarness { +class SyncProducerTest extends KafkaServerTestHarness { private val messageBytes = new Array[Byte](2) // turning off controlled shutdown since testProducerCanTimeout() explicitly shuts down request handler pool. def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfigs(1, zkConnect, false).head)) diff --git a/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala b/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala index e899b02..066f506 100755 --- a/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala +++ b/core/src/test/scala/unit/kafka/server/AdvertiseBrokerTest.scala @@ -17,18 +17,19 @@ package kafka.server -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.utils.{TestUtils, CoreUtils, ZkUtils} import kafka.zk.ZooKeeperTestHarness import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} -class AdvertiseBrokerTest extends JUnit3Suite with ZooKeeperTestHarness { +class AdvertiseBrokerTest extends ZooKeeperTestHarness { var server : KafkaServer = null val brokerId = 0 val advertisedHostName = "routable-host" val advertisedPort = 1234 + @Before override def setUp() { super.setUp() @@ -39,6 +40,7 @@ class AdvertiseBrokerTest extends JUnit3Suite with ZooKeeperTestHarness { server = TestUtils.createServer(KafkaConfig.fromProps(props)) } + @After override def tearDown() { server.shutdown() CoreUtils.rm(server.config.logDirs) diff --git a/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala b/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala index f3ab3f4..df8d5b1 100644 --- a/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala +++ b/core/src/test/scala/unit/kafka/server/DelayedOperationTest.scala @@ -17,22 +17,21 @@ package kafka.server -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.{After, Before, Test} +import org.junit.Assert._ -class DelayedOperationTest extends JUnit3Suite { +class DelayedOperationTest { var purgatory: DelayedOperationPurgatory[MockDelayedOperation] = null - override def setUp() { - super.setUp() + @Before + def setUp() { purgatory = new DelayedOperationPurgatory[MockDelayedOperation](purgatoryName = "mock") } - override def tearDown() { + @After + def tearDown() { purgatory.shutdown() - super.tearDown() } 
@Test diff --git a/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala b/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala index 7877f6c..405ccf8 100644 --- a/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala +++ b/core/src/test/scala/unit/kafka/server/DynamicConfigChangeTest.scala @@ -16,16 +16,15 @@ */ package kafka.server -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test import kafka.integration.KafkaServerTestHarness import kafka.utils._ import kafka.common._ import kafka.log.LogConfig import kafka.admin.{AdminOperationException, AdminUtils} -import org.scalatest.junit.JUnit3Suite -class DynamicConfigChangeTest extends JUnit3Suite with KafkaServerTestHarness { +class DynamicConfigChangeTest extends KafkaServerTestHarness { def generateConfigs() = List(KafkaConfig.fromProps(TestUtils.createBrokerConfig(0, zkConnect))) @Test diff --git a/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala b/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala index 60cd824..7f55a80 100755 --- a/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala +++ b/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala @@ -19,7 +19,6 @@ package kafka.server import kafka.log._ import java.io.File import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite import org.easymock.EasyMock import org.junit._ import org.junit.Assert._ @@ -28,7 +27,7 @@ import kafka.cluster.Replica import kafka.utils.{SystemTime, KafkaScheduler, TestUtils, MockTime, CoreUtils} import java.util.concurrent.atomic.AtomicBoolean -class HighwatermarkPersistenceTest extends JUnit3Suite { +class HighwatermarkPersistenceTest { val configs = TestUtils.createBrokerConfigs(2, TestUtils.MockZkConnect).map(KafkaConfig.fromProps) val topic = "foo" diff --git a/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala b/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala index 90529fa..25f0d41 100644 --- a/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala +++ b/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala @@ -18,7 +18,7 @@ package kafka.server import java.util.Properties -import org.scalatest.junit.JUnit3Suite +import org.junit.{Before, After} import collection.mutable.HashMap import collection.mutable.Map import kafka.cluster.{Partition, Replica} @@ -30,7 +30,7 @@ import java.util.concurrent.atomic.AtomicBoolean import kafka.message.MessageSet -class IsrExpirationTest extends JUnit3Suite { +class IsrExpirationTest { var topicPartitionIsr: Map[(String, Int), Seq[Int]] = new HashMap[(String, Int), Seq[Int]]() val replicaLagTimeMaxMs = 100L @@ -46,14 +46,14 @@ class IsrExpirationTest extends JUnit3Suite { var replicaManager: ReplicaManager = null - override def setUp() { - super.setUp() + @Before + def setUp() { replicaManager = new ReplicaManager(configs.head, time, null, null, null, new AtomicBoolean(false)) } - override def tearDown() { + @After + def tearDown() { replicaManager.shutdown(false) - super.tearDown() } /* diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala index c487f36..13e2bdd 100644 --- a/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigConfigDefTest.scala @@ -22,12 +22,12 @@ import kafka.api.ApiVersion import kafka.message._ import 
org.apache.kafka.common.protocol.SecurityProtocol import org.junit.{Assert, Test} -import org.scalatest.junit.JUnit3Suite +import org.scalatest.Assertions._ import scala.collection.Map import scala.util.Random._ -class KafkaConfigConfigDefTest extends JUnit3Suite { +class KafkaConfigConfigDefTest { @Test def testFromPropsDefaults() { diff --git a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index 2428dbd..192e814 100755 --- a/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -19,15 +19,15 @@ package kafka.server import java.util.Properties -import junit.framework.Assert._ import kafka.api.{ApiVersion, KAFKA_082} import kafka.utils.{TestUtils, CoreUtils} import org.apache.kafka.common.config.ConfigException import org.apache.kafka.common.protocol.SecurityProtocol import org.junit.Test -import org.scalatest.junit.JUnit3Suite +import org.junit.Assert._ +import org.scalatest.Assertions._ -class KafkaConfigTest extends JUnit3Suite { +class KafkaConfigTest { @Test def testLogRetentionTimeHoursProvided() { diff --git a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala index f1977d8..f77f186 100755 --- a/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala +++ b/core/src/test/scala/unit/kafka/server/LeaderElectionTest.scala @@ -17,7 +17,7 @@ package kafka.server -import junit.framework.Assert._ +import org.junit.Assert._ import kafka.api._ import kafka.utils.{TestUtils, ZkUtils, CoreUtils} import kafka.cluster.Broker @@ -26,9 +26,9 @@ import kafka.controller.{ControllerChannelManager, ControllerContext, LeaderIsrA import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness import org.apache.kafka.common.protocol.SecurityProtocol -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} -class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { +class LeaderElectionTest extends ZooKeeperTestHarness { val brokerId1 = 0 val brokerId2 = 1 @@ -36,6 +36,7 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { var staleControllerEpochDetected = false + @Before override def setUp() { super.setUp() @@ -48,6 +49,7 @@ class LeaderElectionTest extends JUnit3Suite with ZooKeeperTestHarness { servers ++= List(server1, server2) } + @After override def tearDown() { servers.foreach(_.shutdown()) servers.foreach(server => CoreUtils.rm(server.config.logDirs)) diff --git a/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala b/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala index e57c1de..344001d 100755 --- a/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala +++ b/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala @@ -19,12 +19,11 @@ package kafka.server import java.io.File import kafka.utils._ -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.{Random, Properties} import kafka.consumer.SimpleConsumer import kafka.message.{NoCompressionCodec, ByteBufferMessageSet, Message} import kafka.zk.ZooKeeperTestHarness -import org.scalatest.junit.JUnit3Suite import kafka.admin.AdminUtils import kafka.api.{PartitionOffsetRequestInfo, FetchRequestBuilder, OffsetRequest} import kafka.utils.TestUtils._ @@ -33,7 +32,7 @@ import org.junit.After import org.junit.Before import org.junit.Test -class LogOffsetTest extends JUnit3Suite with ZooKeeperTestHarness { +class LogOffsetTest 
extends ZooKeeperTestHarness { val random = new Random() var logDir: File = null var topicLogDir: File = null diff --git a/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala b/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala index 7688f26..77e17e1 100755 --- a/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala +++ b/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala @@ -27,10 +27,10 @@ import kafka.serializer.StringEncoder import java.io.File -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import org.junit.Assert._ -class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { +class LogRecoveryTest extends ZooKeeperTestHarness { val replicaLagTimeMaxMs = 5000L val replicaLagMaxMessages = 10L @@ -69,6 +69,7 @@ class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { keyEncoder = classOf[IntEncoder].getName) } + @Before override def setUp() { super.setUp() @@ -86,6 +87,7 @@ class LogRecoveryTest extends JUnit3Suite with ZooKeeperTestHarness { updateProducer() } + @After override def tearDown() { producer.close() for(server <- servers) { diff --git a/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala b/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala index 528525b..5b0e4a5 100755 --- a/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala +++ b/core/src/test/scala/unit/kafka/server/OffsetCommitTest.scala @@ -25,7 +25,6 @@ import kafka.utils.TestUtils._ import kafka.zk.ZooKeeperTestHarness import org.junit.{After, Before, Test} -import org.scalatest.junit.JUnit3Suite import java.util.Properties import java.io.File @@ -33,9 +32,9 @@ import java.io.File import scala.util.Random import scala.collection._ -import junit.framework.Assert._ +import org.junit.Assert._ -class OffsetCommitTest extends JUnit3Suite with ZooKeeperTestHarness { +class OffsetCommitTest extends ZooKeeperTestHarness { val random: Random = new Random() val group = "test-group" val retentionCheckInterval: Long = 100L diff --git a/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala b/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala index a3a03db..dead087 100644 --- a/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala +++ b/core/src/test/scala/unit/kafka/server/ReplicaFetchTest.scala @@ -17,7 +17,7 @@ package kafka.server -import org.scalatest.junit.JUnit3Suite +import org.junit.{After, Before} import kafka.zk.ZooKeeperTestHarness import kafka.utils.TestUtils._ import kafka.producer.KeyedMessage @@ -25,11 +25,12 @@ import kafka.serializer.StringEncoder import kafka.utils.{TestUtils} import kafka.common._ -class ReplicaFetchTest extends JUnit3Suite with ZooKeeperTestHarness { +class ReplicaFetchTest extends ZooKeeperTestHarness { var brokers: Seq[KafkaServer] = null val topic1 = "foo" val topic2 = "bar" + @Before override def setUp() { super.setUp() brokers = createBrokerConfigs(2, zkConnect, false) @@ -37,6 +38,7 @@ class ReplicaFetchTest extends JUnit3Suite with ZooKeeperTestHarness { .map(config => TestUtils.createServer(config)) } + @After override def tearDown() { brokers.foreach(_.shutdown()) super.tearDown() diff --git a/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala b/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala index 00d5933..3770cb4 100644 --- a/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala +++ b/core/src/test/scala/unit/kafka/server/ReplicaManagerTest.scala @@ -27,12 +27,11 @@ import java.io.File import 
org.apache.kafka.common.protocol.Errors import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite import org.junit.Test import scala.collection.Map -class ReplicaManagerTest extends JUnit3Suite { +class ReplicaManagerTest { val topic = "test-topic" @@ -84,7 +83,7 @@ class ReplicaManagerTest extends JUnit3Suite { rm.appendMessages(timeout = 0, requiredAcks = 3, internalTopicsAllowed = false, messagesPerPartition = produceRequest.data, responseCallback = callback) - rm.shutdown(false); + rm.shutdown(false) TestUtils.verifyNonDaemonThreadsStatus diff --git a/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala b/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala index 12269cd..1185a6f 100755 --- a/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerGenerateBrokerIdTest.scala @@ -20,18 +20,18 @@ import java.util.Properties import kafka.zk.ZooKeeperTestHarness import kafka.utils.{TestUtils, CoreUtils} -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.{Before, Test} +import org.junit.Assert._ import java.io.File -class ServerGenerateBrokerIdTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerGenerateBrokerIdTest extends ZooKeeperTestHarness { var props1: Properties = null var config1: KafkaConfig = null var props2: Properties = null var config2: KafkaConfig = null val brokerMetaPropsFile = "meta.properties" + @Before override def setUp() { super.setUp() props1 = TestUtils.createBrokerConfig(-1, zkConnect) diff --git a/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala b/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala index 95534e3..60086c5 100755 --- a/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala @@ -16,7 +16,7 @@ */ package kafka.server -import kafka.zk.ZooKeeperTestHarness +import kafka.zk.{ZooKeeperTestHarnessBeforeAndAfter, ZooKeeperTestHarness} import kafka.consumer.SimpleConsumer import kafka.producer._ import kafka.utils.{IntEncoder, TestUtils, CoreUtils} @@ -27,19 +27,19 @@ import kafka.serializer.StringEncoder import java.io.File -import org.junit.Test -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.{Before, Test} +import org.junit.Assert._ +import org.scalatest.BeforeAndAfter -class ServerShutdownTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerShutdownTest extends ZooKeeperTestHarnessBeforeAndAfter { var config: KafkaConfig = null val host = "localhost" val topic = "test" val sent1 = List("hello", "there") val sent2 = List("more", "messages") - override def setUp(): Unit = { - super.setUp() + override def beforeEach() { + super.beforeEach() val props = TestUtils.createBrokerConfig(0, zkConnect) config = KafkaConfig.fromProps(props) } diff --git a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala index 60e10b3..7d986ad 100755 --- a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala +++ b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala @@ -17,15 +17,14 @@ package kafka.server -import org.scalatest.junit.JUnit3Suite import kafka.utils.ZkUtils import kafka.utils.CoreUtils import kafka.utils.TestUtils import kafka.zk.ZooKeeperTestHarness -import junit.framework.Assert._ +import 
org.junit.Assert._ -class ServerStartupTest extends JUnit3Suite with ZooKeeperTestHarness { +class ServerStartupTest extends ZooKeeperTestHarness { def testBrokerCreatesZKChroot { val brokerId = 0 diff --git a/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala b/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala index 09a0961..d950665 100644 --- a/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala +++ b/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala @@ -22,18 +22,17 @@ import kafka.cluster.Replica import kafka.common.TopicAndPartition import kafka.log.Log import kafka.message.{MessageSet, ByteBufferMessageSet, Message} +import org.junit.{After, Before} -import scala.Some import java.util.{Properties, Collections} import java.util.concurrent.atomic.AtomicBoolean import collection.JavaConversions._ import org.easymock.EasyMock import org.I0Itec.zkclient.ZkClient -import org.scalatest.junit.JUnit3Suite -import junit.framework.Assert._ +import org.junit.Assert._ -class SimpleFetchTest extends JUnit3Suite { +class SimpleFetchTest { val replicaLagTimeMaxMs = 100L val replicaFetchWaitMaxMs = 100 @@ -63,9 +62,8 @@ class SimpleFetchTest extends JUnit3Suite { var replicaManager: ReplicaManager = null - override def setUp() { - super.setUp() - + @Before + def setUp() { // create nice mock since we don't particularly care about zkclient calls val zkClient = EasyMock.createNiceMock(classOf[ZkClient]) EasyMock.replay(zkClient) @@ -117,9 +115,9 @@ class SimpleFetchTest extends JUnit3Suite { partition.inSyncReplicas = allReplicas.toSet } - override def tearDown() { + @After + def tearDown() { replicaManager.shutdown(false) - super.tearDown() } /** diff --git a/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala b/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala index fd8cf7b..4a070bd 100644 --- a/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala +++ b/core/src/test/scala/unit/kafka/utils/ByteBoundedBlockingQueueTest.scala @@ -19,7 +19,7 @@ package kafka.utils import java.util.concurrent.TimeUnit -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test class ByteBoundedBlockingQueueTest { diff --git a/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala b/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala index 6380b6e..068526e 100644 --- a/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/utils/CommandLineUtilsTest.scala @@ -17,7 +17,7 @@ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import org.junit.Test class CommandLineUtilsTest { diff --git a/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala b/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala index fbd245c..56f5905 100644 --- a/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala +++ b/core/src/test/scala/unit/kafka/utils/IteratorTemplateTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import org.scalatest.Assertions import org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/utils/JsonTest.scala b/core/src/test/scala/unit/kafka/utils/JsonTest.scala index 93550e8..6c8ed97 100644 --- a/core/src/test/scala/unit/kafka/utils/JsonTest.scala +++ b/core/src/test/scala/unit/kafka/utils/JsonTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ 
import org.junit.{Test, After, Before} class JsonTest { diff --git a/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala b/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala index c96c0ff..1fda068 100644 --- a/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala +++ b/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala @@ -22,13 +22,12 @@ import kafka.server.{ReplicaFetcherManager, KafkaConfig} import kafka.api.LeaderAndIsr import kafka.zk.ZooKeeperTestHarness import kafka.common.TopicAndPartition -import org.scalatest.junit.JUnit3Suite import org.junit.Assert._ -import org.junit.Test +import org.junit.{Before, Test} import org.easymock.EasyMock -class ReplicationUtilsTest extends JUnit3Suite with ZooKeeperTestHarness { +class ReplicationUtilsTest extends ZooKeeperTestHarness { val topic = "my-topic-test" val partitionId = 0 val brokerId = 1 @@ -45,7 +44,7 @@ class ReplicationUtilsTest extends JUnit3Suite with ZooKeeperTestHarness { val topicDataLeaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(1,leaderEpoch,List(1,2),0), controllerEpoch) - + @Before override def setUp() { super.setUp() ZkUtils.createPersistentPath(zkClient,topicPath,topicData) diff --git a/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala b/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala index cfea63b..7c131fc 100644 --- a/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala +++ b/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import org.junit.{Test, After, Before} import kafka.utils.TestUtils.retry diff --git a/core/src/test/scala/unit/kafka/utils/TestUtils.scala b/core/src/test/scala/unit/kafka/utils/TestUtils.scala index 17e9fe4..a241473 100755 --- a/core/src/test/scala/unit/kafka/utils/TestUtils.scala +++ b/core/src/test/scala/unit/kafka/utils/TestUtils.scala @@ -43,8 +43,7 @@ import kafka.admin.AdminUtils import kafka.producer.ProducerConfig import kafka.log._ -import junit.framework.AssertionFailedError -import junit.framework.Assert._ +import org.junit.Assert._ import org.apache.kafka.clients.producer.KafkaProducer import scala.collection.Map @@ -594,7 +593,7 @@ object TestUtils extends Logging { block return } catch { - case e: AssertionFailedError => + case e: AssertionError => val ellapsed = System.currentTimeMillis - startTime if(ellapsed > maxWaitMs) { throw e diff --git a/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala b/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala index 052aecd..a018dde 100644 --- a/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala +++ b/core/src/test/scala/unit/kafka/utils/timer/TimerTaskListTest.scala @@ -16,7 +16,7 @@ */ package kafka.utils.timer -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala b/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala index 8507592..95de378 100644 --- a/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala +++ b/core/src/test/scala/unit/kafka/utils/timer/TimerTest.scala @@ -18,7 +18,7 @@ package kafka.utils.timer import java.util.concurrent.{CountDownLatch, ExecutorService, Executors, TimeUnit} -import junit.framework.Assert._ +import org.junit.Assert._ import java.util.concurrent.atomic._ import 
org.junit.{Test, After, Before} diff --git a/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala b/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala index 2be1619..247aa6e 100644 --- a/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala +++ b/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala @@ -18,13 +18,11 @@ package kafka.zk import kafka.consumer.ConsumerConfig -import org.I0Itec.zkclient.ZkClient import kafka.utils.ZkUtils import kafka.utils.TestUtils import org.junit.Assert -import org.scalatest.junit.JUnit3Suite -class ZKEphemeralTest extends JUnit3Suite with ZooKeeperTestHarness { +class ZKEphemeralTest extends ZooKeeperTestHarness { var zkSessionTimeoutMs = 1000 def testEphemeralNodeCleanup = { diff --git a/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala b/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala index d3e44c6..35c635a 100644 --- a/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala +++ b/core/src/test/scala/unit/kafka/zk/ZKPathTest.scala @@ -17,13 +17,12 @@ package kafka.zk -import junit.framework.Assert import kafka.consumer.ConsumerConfig import kafka.utils.{ZkPath, TestUtils, ZkUtils} import org.apache.kafka.common.config.ConfigException -import org.scalatest.junit.JUnit3Suite +import org.junit.Assert._ -class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { +class ZKPathTest extends ZooKeeperTestHarness { val path: String = "/some_dir" val zkSessionTimeoutMs = 1000 @@ -54,7 +53,7 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) } def testMakeSurePersistsPathExistsThrowsException { @@ -82,7 +81,7 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, path)) } def testCreateEphemeralPathThrowsException { @@ -110,7 +109,7 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create ephemeral path") } - Assert.assertTrue("Failed to create ephemeral path", ZkUtils.pathExists(zkClient, path)) + assertTrue("Failed to create ephemeral path", ZkUtils.pathExists(zkClient, path)) } def testCreatePersistentSequentialThrowsException { @@ -140,6 +139,6 @@ class ZKPathTest extends JUnit3Suite with ZooKeeperTestHarness { case exception: Throwable => fail("Failed to create persistent path") } - Assert.assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, actualPath)) + assertTrue("Failed to create persistent path", ZkUtils.pathExists(zkClient, actualPath)) } } diff --git a/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala b/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala index 1f4d10d..ed5fe9b 100755 --- a/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala +++ b/core/src/test/scala/unit/kafka/zk/ZooKeeperTestHarness.scala @@ -17,11 +17,11 @@ package kafka.zk -import org.scalatest.junit.JUnit3Suite import org.I0Itec.zkclient.ZkClient import kafka.utils.{ZkUtils, CoreUtils} +import org.junit.{After, Before} -trait ZooKeeperTestHarness extends JUnit3Suite { +trait ZooKeeperTestHarness { var zkPort: Int = -1 var zookeeper: 
EmbeddedZookeeper = null var zkClient: ZkClient = null @@ -30,17 +30,17 @@ trait ZooKeeperTestHarness extends JUnit3Suite { def zkConnect: String = "127.0.0.1:" + zkPort - override def setUp() { - super.setUp + @Before + def setUp() { zookeeper = new EmbeddedZookeeper() zkPort = zookeeper.port zkClient = ZkUtils.createZkClient(zkConnect, zkSessionTimeout, zkConnectionTimeout) } - override def tearDown() { + @After + def tearDown() { CoreUtils.swallow(zkClient.close()) CoreUtils.swallow(zookeeper.shutdown()) - super.tearDown } } -- 2.4.3
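
The same mechanical pattern repeats across every file touched above: drop org.scalatest.junit.JUnit3Suite and junit.framework.Assert, pull assertions from org.junit.Assert._, and annotate the lifecycle overrides with org.junit.{Before, After} so JUnit4 runs them while the explicit super calls keep the harness chaining. A minimal sketch of the resulting shape against kafka.zk.ZooKeeperTestHarness follows; the class and test names are illustrative only and are not part of this patch.

import kafka.zk.ZooKeeperTestHarness
import org.junit.{After, Before, Test}
import org.junit.Assert._

// Illustrative only: shows the JUnit4 lifecycle wiring this patch applies,
// not a test that exists in the tree.
class ExampleHarnessTest extends ZooKeeperTestHarness {

  @Before
  override def setUp() {
    super.setUp()        // run the harness setup (embedded ZooKeeper + ZkClient) first
    // per-test fixtures (brokers, topics, producers) would be created here
  }

  @After
  override def tearDown() {
    // per-test cleanup goes here, before the harness tears down ZooKeeper
    super.tearDown()
  }

  @Test
  def testExample() {
    assertTrue("zkConnect should point at the embedded ZooKeeper", zkConnect.startsWith("127.0.0.1:"))
  }
}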