Skip to content

Instantly share code, notes, and snippets.

@ctoestreich
Created February 25, 2019 15:31
Show Gist options
  • Save ctoestreich/1be7ea1c512d0cdf3a672febe9b70a97 to your computer and use it in GitHub Desktop.
KafkaClientSpec.groovy
/*
* Copyright 2017-2019 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.company.test
import com.company.avro.PersonObject //todo: this is not my real object, sanitized this for business reasons
import io.micronaut.configuration.kafka.annotation.KafkaListener
import io.micronaut.configuration.kafka.annotation.OffsetReset
import io.micronaut.configuration.kafka.annotation.OffsetStrategy
import io.micronaut.configuration.kafka.annotation.Topic
import io.micronaut.configuration.kafka.config.AbstractKafkaConfiguration
import io.micronaut.context.ApplicationContext
import io.micronaut.context.annotation.Property
import io.micronaut.core.util.CollectionUtils
import io.micronaut.runtime.server.EmbeddedServer
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.consumer.ConsumerRecord
import spock.lang.AutoCleanup
import spock.lang.Shared
import spock.lang.Specification
import spock.util.concurrent.PollingConditions
import javax.inject.Singleton
class KafkaClientSpec extends Specification {

    /**
     * Embedded Micronaut server with an in-memory Kafka broker and the test
     * topic pre-created. The single quotes around 'localhost:${random.port}'
     * are deliberate: Groovy must NOT interpolate the placeholder — Micronaut
     * resolves ${random.port} itself when the context starts.
     */
    @Shared
    @AutoCleanup
    EmbeddedServer embeddedServer = ApplicationContext.run(EmbeddedServer,
            CollectionUtils.mapOf(
                    "kafka.bootstrap.servers", 'localhost:${random.port}',
                    "kafka.schema.registry.url", 'http://localhost:8080',
                    AbstractKafkaConfiguration.EMBEDDED, true,
                    AbstractKafkaConfiguration.EMBEDDED_TOPICS, [PersonObjectClient.OUTPUT]
            )
    )

    void "test send message when Kafka is not available"() {
        given: 'the producer client, the listener bean, and polling conditions'
        PersonObjectClient client = embeddedServer.applicationContext.getBean(PersonObjectClient)
        PersonObjectListener listener = embeddedServer.applicationContext.getBean(PersonObjectListener)
        PollingConditions conditions = new PollingConditions(timeout: 30, delay: 1)
        PersonObject personObject = PersonObject.newBuilder()
                .setEventKey("12345")
                .build()

        when: 'the event is published to the embedded broker'
        client.publishEvent(personObject.getEventKey().toString(), personObject)

        then: 'the listener eventually records exactly that one event key'
        conditions.eventually {
            // Statements inside eventually {} are implicit Spock conditions,
            // re-evaluated until they hold or the 30s timeout elapses.
            listener.events.size() == 1
            listener.events.contains("12345")
        }
    }

    /**
     * Test-only listener that records the key of every record received on
     * the output topic, consuming from the earliest offset so the message
     * published before the consumer joins is still seen.
     */
    @Singleton
    static class PersonObjectListener {

        // The Kafka consumer thread appends while the test thread polls via
        // PollingConditions; a synchronized list prevents the data race that
        // a plain ArrayList would have here.
        List<String> events = [].asSynchronized()

        @KafkaListener(
                offsetReset = OffsetReset.EARLIEST,
                offsetStrategy = OffsetStrategy.AUTO,
                properties = [
                        @Property(name = ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, value = "io.confluent.kafka.serializers.KafkaAvroDeserializer"),
                        @Property(name = ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, value = "io.confluent.kafka.serializers.KafkaAvroDeserializer")
                ]
        )
        @Topic(PersonObjectClient.OUTPUT)
        void receive(ConsumerRecord<String, PersonObject> consumerRecord) {
            // NOTE(review): KafkaAvroDeserializer can yield an Avro Utf8 for
            // the key rather than a java.lang.String, in which case
            // contains("12345") above would never match. toString() is a
            // no-op for a real String and normalizes Utf8 — confirm against
            // the actual serialized key type.
            events.add(consumerRecord.key().toString())
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment