Kafka consumer ClassNotFoundException - java

(Before I start: my English might not be good enough to describe everything clearly. Kindly let me know if anything is unclear.)
I am trying to send a data object from Spring project A (producer) to Spring project B (consumer) via Kafka.
The problem is that the data classes in A and B live under different packages, so project B's class cannot be resolved from the type header written by project A.
But the two classes have the same fields, so I want to receive project A's object as the listener argument in project B.
Error message
Listener failed; nested exception is
org.springframework.kafka.support.serializer.DeserializationException: failed to deserialize; nested exception is
org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.example.springboot.DTO.kafka.PostViewCountDTO]; nested exception is
java.lang.ClassNotFoundException: com.example.springboot.DTO.kafka.PostViewCountDTO
build.gradle
implementation 'org.apache.kafka:kafka-clients:2.8.0'
implementation 'org.apache.kafka:kafka_2.13:2.8.0'
implementation 'org.springframework.boot:spring-boot-starter-web:2.5.3'
Data class (used in both projects A and B)
public class PostViewCountDTO implements Serializable {

    private static final long serialVersionUID = 1L;

    @NotNull
    private long postNo;
}
producer config
@Configuration
public class PostViewProducerConfig {

    @Value("${spring.kafka.producer.bootstrap-servers}")
    private String bootstrapServer;

    @Bean
    public Map<String, Object> postViewProducerConfigs() {
        return JsonSerializer.getStringObjectMap(bootstrapServer);
    }

    @Bean
    public ProducerFactory<String, PostViewCountDTO> postViewCountDTOProducerFactory() {
        return new DefaultKafkaProducerFactory<>(postViewProducerConfigs());
    }

    @Bean
    public KafkaTemplate<String, PostViewCountDTO> postViewDTOKafkaTemplate() {
        return new KafkaTemplate<>(postViewCountDTOProducerFactory());
    }
}
Common JsonSerializer class
public class JsonSerializer {

    static Map<String, Object> getStringObjectMap(String bootstrapServer) {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, org.springframework.kafka.support.serializer.JsonSerializer.class);
        return props;
    }
}
consumer config
@Configuration
@RequiredArgsConstructor
public class PostViewConsumerConfig {

    @Value("${spring.kafka.consumer.bootstrap-servers}")
    private String bootstrapServer;

    @Bean
    public Map<String, Object> postViewConsumerConfigs() {
        return JsonDeserializer.getStringObjectMap(bootstrapServer);
    }

    @Bean
    public ConsumerFactory<String, PostViewCountDTO> postViewCountDTO_ConsumerFactory() {
        return new DefaultKafkaConsumerFactory<>(postViewConsumerConfigs());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, PostViewCountDTO> postViewCountListener() {
        ConcurrentKafkaListenerContainerFactory<String, PostViewCountDTO> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(postViewCountDTO_ConsumerFactory());
        return factory;
    }

    @Bean
    public StringJsonMessageConverter jsonConverter() {
        return new StringJsonMessageConverter();
    }
}
produce
@Async
public void sendPostNo(PostViewCountDTO postViewCountDTO) {
    postViewKafkaTemplate.send(topic_viewCount, null, postViewCountDTO);
}
consume
@KafkaListener(topics = topic_viewCount, groupId = groupId, containerFactory = "postViewCountListener")
public void consume(@Payload PostViewCountDTO postViewCountDTO) {
    ...
}

You need to add type mappings to the serializer and deserializer
https://docs.spring.io/spring-kafka/docs/current/reference/html/#serdes-mapping-types
On the producer side, map com.a.PostViewCountDTO to PostViewCountDTO.
On the consumer side, map com.b.PostViewCountDTO to PostViewCountDTO.
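For example, here is a minimal sketch of those mappings as producer/consumer properties; the packages com.a and com.b stand in for your two projects' actual packages, and the token name PostViewCountDTO is arbitrary as long as both sides use the same one:

// Producer side (project A): tag outgoing records with a logical type name
props.put(JsonSerializer.TYPE_MAPPINGS, "PostViewCountDTO:com.a.PostViewCountDTO");

// Consumer side (project B): resolve the same token to B's own class
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
props.put(JsonDeserializer.TYPE_MAPPINGS, "PostViewCountDTO:com.b.PostViewCountDTO");
props.put(JsonDeserializer.TRUSTED_PACKAGES, "com.b");

Note that JsonSerializer/JsonDeserializer here are the org.springframework.kafka.support.serializer classes, not the helper class of the same name shown above.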

Related

How to launch kafka consumer before producer

I have a bunch of microservices interacting with a Kafka topic. One of the microservices should consume two integers, sum them, and send the result to the topic. The problem is that I cannot configure the microservice so that the consumer is launched before the producer. My code is as follows:
@SpringBootApplication
public class AdderApplication {

    public static void main(String[] args) {
        ConfigurableApplicationContext context = SpringApplication.run(AdderApplication.class, args);
        AdderConsumer consumer = context.getBean(AdderConsumer.class);
        AdderProducer producer = context.getBean(AdderProducer.class);
        producer.sumTwoIntegers();
    }
}

@Component
public class AdderConsumer extends Controller {

    private CountDownLatch latch = new CountDownLatch(3);

    @KafkaListener(topics = "${kafka.topic.name}")
    public void listenToPartitionWithOffset(@Payload Integer message) {
        if (message != null) {
            list.add(message);
            isProduce = true;
            System.out.println(list);
        }
    }
}
@Component
public class AdderProducer extends Controller {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${kafka.topic.name}")
    private String topicName;

    public void sumTwoIntegers() {
        // logic
    }

    private void sendMessage(String message) {
        // logic
    }
}
@Configuration
@EnableKafka
public class KafkaConfig {

    @Value("${kafka.boot.server}")
    private String kafkaServer;

    @Value("${kafka.consumer.group.id}")
    private String kafkaGroupId;

    @Bean
    public LoggingErrorHandler errorHandler() {
        return new LoggingErrorHandler();
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerConfig());
    }

    @Bean
    public ProducerFactory<String, String> producerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(config);
    }

    @Bean
    public ConsumerFactory<String, Integer> consumerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(config);
    }

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, Integer>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Integer> listener = new ConcurrentKafkaListenerContainerFactory<>();
        listener.setConsumerFactory(consumerConfig());
        listener.setErrorHandler(errorHandler());
        return listener;
    }
}
I debugged the code and it calls both the producer and the listener; however, I need the listener to receive both integers first and only then call the producer method.
I would appreciate your thoughts.
I found a solution: I implemented a timer for the producer, which is invoked periodically while the application is running.
When it is invoked the first time with no data to process, it posts nothing; then the consumer consumes data, and the second invocation of the producer posts the necessary data to the topic.
The other solution is to call the producer method from the consumer.
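A minimal sketch of that second approach, reusing the names from the question (list and sumTwoIntegers() come from the code above; the two-element threshold is an assumption):

@Component
public class AdderConsumer {

    private final List<Integer> list = new ArrayList<>();
    private final AdderProducer producer;

    public AdderConsumer(AdderProducer producer) {
        this.producer = producer;
    }

    @KafkaListener(topics = "${kafka.topic.name}")
    public void listen(@Payload Integer message) {
        list.add(message);
        if (list.size() == 2) {        // both integers have arrived
            producer.sumTwoIntegers(); // trigger the producer only now
        }
    }
}

This removes the ordering problem entirely: the producer runs as a reaction to consumption instead of racing it at startup.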

How to test if method with #KafkaListener is being called

I'm really struggling to write a test that checks whether my Kafka consumer is correctly called when messages are sent to its designated topic.
My consumer:
@Service
@Slf4j
@AllArgsConstructor(onConstructor = @__(@Autowired))
public class ProcessingConsumer {

    private AppService appService;

    @KafkaListener(
            topics = "${topic}",
            containerFactory = "processingConsumerContainerFactory")
    public void listen(ConsumerRecord<Key, Value> message, Acknowledgment ack) {
        try {
            appService.processMessage(message);
            ack.acknowledge();
        } catch (Throwable t) {
            log.error("error while processing message!", t);
        }
    }
}
My consumer config:
@EnableKafka
@Configuration
public class ProcessingCosumerConfig {

    @Value("${spring.kafka.schema-registry-url}")
    private String schemaRegistryUrl;

    private KafkaProperties props;

    public ProcessingCosumerConfig(KafkaProperties kafkaProperties) {
        this.props = kafkaProperties;
    }

    public Map<String, Object> deserializerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
        props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
        return props;
    }

    private KafkaAvroDeserializer getKafkaAvroDeserializer(Boolean isKey) {
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(deserializerConfigs(), isKey);
        return kafkaAvroDeserializer;
    }

    private DefaultKafkaConsumerFactory consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(
                props.buildConsumerProperties(),
                getKafkaAvroDeserializer(true),
                getKafkaAvroDeserializer(false));
    }

    @Bean(name = "processingConsumerContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Key, Value>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<Key, Value> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.getContainerProperties().setAckOnError(false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        factory.setErrorHandler(new SeekToCurrentErrorHandler());
        return factory;
    }
}
Finally, my (wannabe) test:
@DirtiesContext
public class ProcessingConsumerTest extends BaseIntegrationTest {

    @Autowired private ProcessingProducerFixture processingProducer;
    @Autowired private ProcessingConsumer processingConsumer;

    @org.springframework.beans.factory.annotation.Value("${topic}")
    String topic;

    @Test
    public void consumer_shouldConsumeMessages_whenMessagesAreSent() throws Exception {
        Thread.sleep(1000);
        ProducerRecord<Key, Value> message = new ProducerRecord<>(topic, new Key("b"), new Value("a", "b", "c", "d"));
        processingProducer.send(message);
    }
}
And that's about all I have so far.
I've tried checking whether execution reaches the consumer, both with the debugger and with simple print statements, but it simply doesn't seem to get there. Also, even if it were called correctly by my test, I have no idea how to actually assert that in the test.
Inject a mock AppService into the listener and verify its processMessage() was called.
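A minimal sketch of that idea, assuming the test context replaces AppService with a Mockito mock (here via @MockBean) and that BaseIntegrationTest already sets up the broker and topic; Mockito's timeout() keeps retrying the verification until the asynchronous listener has fired:

@MockBean
private AppService appService;

@Test
public void consumer_shouldConsumeMessages_whenMessagesAreSent() {
    ProducerRecord<Key, Value> message =
            new ProducerRecord<>(topic, new Key("b"), new Value("a", "b", "c", "d"));
    processingProducer.send(message);

    // passes as soon as the listener calls the service; fails after 5s otherwise
    verify(appService, timeout(5000)).processMessage(any(ConsumerRecord.class));
}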

A dependency autowired correctly in constructor gets null later on automatically for same Service object with KafkaListener

@EnableKafka
@Configuration
public class KafkaConfiguration {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        // configurations
        return new DefaultKafkaConsumerFactory<>(config);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}
@Service
public class KafkaConsumer {

    private Dep dependency;

    @Autowired
    public KafkaConsumer(Dep dep) {
        this.dependency = dep;
    }

    @KafkaListener(topics = "Kafka_Example", containerFactory = "kafkaListenerContainerFactory")
    public void consume(String message) {
        System.out.println("Consumed message: " + message); // dependency is null here
    }
}
The dependency is properly autowired when the constructor is called. But later, when the consume method is called by the Spring Kafka listener, the dependency is null. Both times it is the same KafkaConsumer object. Dep is a @Service class with a basic print method. How is this possible?
But when I add another container-factory provider method, it works and the dependency is not null:
@Bean
public ConcurrentKafkaListenerContainerFactory<String, String> customKafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory());
    return factory;
}
Using this as the containerFactory works.
What changed?

How to write Unit test for #KafkaListener?

Trying to figure out whether I can write a unit test for a @KafkaListener using spring-kafka and spring-kafka-test.
My listener class:
public class MyKafkaListener {

    @Autowired
    private MyMessageProcessor myMessageProcessor;

    @KafkaListener(topics = "${kafka.topic.01}", groupId = "SF.CLIENT", clientIdPrefix = "SF.01", containerFactory = "myMessageListenerContainerFactory")
    public void myMessageListener(MyMessage message) {
        myMessageProcessor.process(message);
        log.info("MyMessage processed");
    }
}
My test class:
@RunWith(SpringRunner.class)
@DirtiesContext
@EmbeddedKafka(partitions = 1, topics = {"I1.Topic.json.001"})
@ContextConfiguration(classes = {TestKafkaConfig.class})
public class MyMessageConsumersTest {

    @Autowired
    private MyMessageProcessor myMessageProcessor;

    @Value("${kafka.topic.01}")
    private String TOPIC_01;

    @Autowired
    private KafkaTemplate<String, MyMessage> messageProducer;

    @Test
    public void testSalesforceMessageListner() {
        MyMessageConsumers myMessageConsumers = new MyMessageConsumers(mockService);
        messageProducer.send(TOPIC_01, "MessageID", new MyMessage());
        verify(myMessageProcessor, times(1)).process(any(MyMessage.class));
    }
}
My test config class:
@Configuration
@EnableKafka
public class TestKafkaConfig {

    @Bean
    public MyMessageProcessor myMessageProcessor() {
        return mock(MyMessageProcessor.class);
    }

    @Bean
    public KafkaEmbedded kafkaEmbedded() {
        return new KafkaEmbedded(1, true, 1, "I1.Topic.json.001");
    }

    // Consumer
    @Bean
    public ConsumerFactory<String, MyMessage> myMessageConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new JsonDeserializer<>(MyMessage.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, MyMessage> myMessageListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, MyMessage> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(myMessageConsumerFactory());
        return factory;
    }

    // Producer
    @Bean
    public ProducerFactory<String, MyMessage> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaMessageSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, MyMessage> messageProducer() {
        return new KafkaTemplate<>(producerFactory());
    }
}
Is there any simple way to make this work?
Or should I test the @KafkaListener in some other way? In a unit test, how do I ensure the @KafkaListener is invoked when a new message arrives in Kafka?
how do I ensure @KafkaListener is invoked when a new message arrives in Kafka
Well, testing such functionality is essentially the framework's responsibility. In your case you just need to concentrate on the business logic and unit test exactly your custom code, not the code compiled into the framework. Besides, there is no good point in testing a @KafkaListener method that does nothing more than log incoming messages: it is definitely going to be too hard to find a hook for test-case verification.
On the other hand, I really believe the business logic in your @KafkaListener method is more complicated than what you show. So it might really be better to verify the custom code called from that method (e.g. a DB insert, some other service call, etc.) rather than try to find a hook exactly for myMessageListener().
What you do with mock(MyMessageProcessor.class) is really a good way to verify business logic. The only thing wrong in your code is the duplication for the embedded Kafka: you use the annotation and also declare a @Bean in the config. You should think about removing one of them, although it isn't clear where your production code is, which should really be free of the embedded Kafka. Otherwise, if everything is in the test scope, I don't see any problems with your consumer and producer factory configuration. You definitely have the minimal possible config for the @KafkaListener and KafkaTemplate. The only thing you need is to remove the duplicate embedded Kafka so you don't start the broker twice.
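A minimal sketch of that suggestion applied to the test above: keep the @EmbeddedKafka annotation, drop the kafkaEmbedded() @Bean, and give the verification a timeout so it waits for the asynchronous listener (the 5-second value is an arbitrary choice):

@Test
public void testSalesforceMessageListner() {
    messageProducer.send(TOPIC_01, "MessageID", new MyMessage());
    // timeout(...) retries until the listener has handed the message to the mock
    verify(myMessageProcessor, timeout(5000).times(1)).process(any(MyMessage.class));
}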
You can wrap the listener in your test case.
Given
@SpringBootApplication
public class So52783066Application {

    public static void main(String[] args) {
        SpringApplication.run(So52783066Application.class, args);
    }

    @KafkaListener(id = "so52783066", topics = "so52783066")
    public void listen(String in) {
        System.out.println(in);
    }
}
then
@RunWith(SpringRunner.class)
@SpringBootTest
public class So52783066ApplicationTests {

    @ClassRule
    public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "so52783066");

    @Autowired
    private KafkaListenerEndpointRegistry registry;

    @Autowired
    private KafkaTemplate<String, String> template;

    @Before
    public void setup() {
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
    }

    @Test
    public void test() throws Exception {
        ConcurrentMessageListenerContainer<?, ?> container = (ConcurrentMessageListenerContainer<?, ?>) registry
                .getListenerContainer("so52783066");
        container.stop();
        @SuppressWarnings("unchecked")
        AcknowledgingConsumerAwareMessageListener<String, String> messageListener = (AcknowledgingConsumerAwareMessageListener<String, String>) container
                .getContainerProperties().getMessageListener();
        CountDownLatch latch = new CountDownLatch(1);
        container.getContainerProperties()
                .setMessageListener(new AcknowledgingConsumerAwareMessageListener<String, String>() {

                    @Override
                    public void onMessage(ConsumerRecord<String, String> data, Acknowledgment acknowledgment,
                            Consumer<?, ?> consumer) {
                        messageListener.onMessage(data, acknowledgment, consumer);
                        latch.countDown();
                    }

                });
        container.start();
        template.send("so52783066", "foo");
        assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
    }
}
Here is my working solution for the consumer, based on your code. Thank you :-)
The configuration is the following:
@TestConfiguration
@EnableKafka
@Profile("kafka_test")
public class KafkaTestConfig {

    private static Logger log = LoggerFactory.getLogger(KafkaTestConfig.class);

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Bean
    @Primary
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group-id");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        log.info("Consumer TEST config = {}", props);
        return props;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        log.info("Producer TEST config = {}", props);
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<String>());
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        DefaultKafkaProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(producerConfigs());
        return pf;
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            ConsumerFactory<String, String> kafkaConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.getContainerProperties().setAckOnError(false);
        factory.setConcurrency(2);
        return factory;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
        return kafkaTemplate;
    }

    @Bean
    public KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry() {
        KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry = new KafkaListenerEndpointRegistry();
        return kafkaListenerEndpointRegistry;
    }
}
Place all the beans you need to include in the test in a different class:
@TestConfiguration
@Profile("kafka_test")
@EnableKafka
public class KafkaBeansConfig {

    @Bean
    public MyProducer myProducer() {
        return new MyProducer();
    }

    // more beans
}
I created a BaseKafkaConsumerTest class so I can reuse it:
@ExtendWith(SpringExtension.class)
@TestPropertySource(properties = { "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}" })
@TestInstance(Lifecycle.PER_CLASS)
@DirtiesContext
@ContextConfiguration(classes = KafkaTestConfig.class)
@ActiveProfiles("kafka_test")
public class BaseKafkaConsumerTest {

    @Autowired
    protected EmbeddedKafkaBroker embeddedKafka;

    @Value("${spring.embedded.kafka.brokers}")
    private String brokerAddresses;

    @Autowired
    protected KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;

    @Autowired
    protected KafkaTemplate<String, String> senderTemplate;

    public void setUp() {
        embeddedKafka.brokerProperty("controlled.shutdown.enable", true);
        for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
                .getListenerContainers()) {
            System.err.println(messageListenerContainer.getContainerProperties().toString());
            ContainerTestUtils.waitForAssignment(messageListenerContainer, embeddedKafka.getPartitionsPerTopic());
        }
    }

    @AfterAll
    public void tearDown() {
        for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
                .getListenerContainers()) {
            messageListenerContainer.stop();
        }
        embeddedKafka.getKafkaServers().forEach(b -> b.shutdown());
        embeddedKafka.getKafkaServers().forEach(b -> b.awaitShutdown());
    }
}
Extend the base class to test your consumer:
@EmbeddedKafka(topics = MyConsumer.TOPIC_NAME)
@Import(KafkaBeansConfig.class)
public class MYKafkaConsumerTest extends BaseKafkaConsumerTest {

    private static Logger log = LoggerFactory.getLogger(MYKafkaConsumerTest.class);

    @Autowired
    private MyConsumer myConsumer;

    // mocks with @MockBean

    @Configuration
    @ComponentScan({ "com.myfirm.kafka" })
    static class KafkaLocalTestConfig {
    }

    @BeforeAll
    public void setUp() {
        super.setUp();
    }

    @Test
    public void testMessageIsReceived() throws Exception {
        // mocks
        String jsonPayload = "{\"id\":\"12345\",\"cookieDomain\":\"helloworld\"}";
        ListenableFuture<SendResult<String, String>> future =
                senderTemplate.send(MyConsumer.TOPIC_NAME, jsonPayload);
        Thread.sleep(10000);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {

            @Override
            public void onSuccess(SendResult<String, String> result) {
                log.info("successfully sent message='{}' with offset={}", jsonPayload,
                        result.getRecordMetadata().offset());
            }

            @Override
            public void onFailure(Throwable ex) {
                log.error("unable to send message='{}'", jsonPayload, ex);
            }
        });
        Mockito.verify(myService, Mockito.times(1))
                .update(Mockito.any(MyDetails.class));
    }
}
As I read in other posts, don't test the business logic this way; just test that the calls are made.
If you want to write integration tests using EmbeddedKafka, you can do something like this.
Assume we have some KafkaListener which accepts a RequestDto as a payload.
In your test class you should create a TestConfiguration in order to create the producer beans and autowire a KafkaTemplate into your test. Also notice that instead of autowiring the consumer, we inject a consumer SpyBean.
In the someTest method we create a latch and set up the consumer's listener method so that when it is called the latch opens, and the assertions take place only after the listener has received the payload.
Also notice the any() ?: RequestDto() line. You should use the elvis operator with any() only if you are using Mockito's any() with non-null Kotlin method arguments, because any() returns null first.
@EnableKafka
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@EmbeddedKafka(partitions = 10, brokerProperties = ["listeners=PLAINTEXT://localhost:9092", "port=9092"])
class KafkaIgniteApplicationTests {

    @SpyBean
    private lateinit var consumer: Consumer

    @TestConfiguration
    class Config {

        @Value("\${spring.kafka.consumer.bootstrap-servers}")
        private lateinit var servers: String

        fun producerConfig(): Map<String, Any> {
            val props = mutableMapOf<String, Any>()
            props[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG] = servers
            props[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java
            props[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java
            return props
        }

        @Bean
        fun producerFactory(): ProducerFactory<String, String> {
            return DefaultKafkaProducerFactory(producerConfig())
        }

        @Bean
        fun kafkaTemplate(producerFactory: ProducerFactory<String, String>): KafkaTemplate<String, String> {
            return KafkaTemplate(producerFactory)
        }
    }

    @Autowired
    private lateinit var kafkaTemplate: KafkaTemplate<String, String>

    @Test
    fun someTest() {
        val lock = CountDownLatch(1)
        `when`(consumer.receive(any() ?: RequestDto())).thenAnswer {
            it.callRealMethod()
            lock.countDown()
        }
        val request = "{\"value\":\"1\"}"
        kafkaTemplate.send(TOPIC, request)
        lock.await(1000, TimeUnit.MILLISECONDS)
        verify(consumer).receive(RequestDto().apply { value = BigDecimal.ONE })
    }
}
In a unit test, how do I ensure @KafkaListener is invoked when a new message arrives in Kafka?
Instead of the Awaitility or CountDownLatch approach, an easier way is to make the actual @KafkaListener bean a Mockito spy using @SpyBean. A spy basically allows you to record all interactions made on an actual bean instance so that you can verify them later. Together with Mockito's timeout verification feature, you can ensure that the verification is retried over and over until a certain timeout after the producer sends the message.
Something like:
@SpringBootTest(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EmbeddedKafka(topics = {"fooTopic"})
public class MyMessageConsumersTest {

    @SpyBean
    private MyKafkaListener myKafkaListener;

    @Captor
    private ArgumentCaptor<MyMessage> myMessageCaptor;

    @Test
    public void test() {
        // create a KafkaTemplate to send some message to the topic...
        verify(myKafkaListener, timeout(5000)).myMessageListener(myMessageCaptor.capture());
        // assert the KafkaListener is configured correctly such that it is invoked with the expected parameter
        assertThat(myMessageCaptor.getValue()).isEqualTo(xxxxx);
    }
}

exception when enabling solr repositories in solr kafka integration

I am trying to connect to Solr and save data read from a Kafka topic. When I try to use @EnableSolrRepositories I get the following exception:
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myDocRepository': Could not resolve matching constructor (hint: specify index/type/name arguments for simple parameters to avoid type ambiguities)
EDIT: this is my repository:
@Component
@Repository
public interface MyDocRepository extends SolrCrudRepository<Assert, String> {
}
My config class contains both the Kafka and Solr configs:
@Configuration
@EnableKafka
@Component
@EnableSolrRepositories(basePackageClasses = MyDocRepository.class)
public class ReceiverConfig {

    @Value("${kafka.servers.bootstrap}")
    private String bootstrapServers;

    @Value("${solr.server.url}")
    private String solrUrl;

    @Bean
    public Map<String, Object> consumerConfigs() {
        System.out.println("consumer configs : ");
        Map<String, Object> props = new HashMap<>();
        // list of host:port pairs used for establishing the initial connections
        // to the Kafka cluster
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // consumer groups allow a pool of processes to divide the work of
        // consuming and processing records
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "helloworld");
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        System.out.println("consumer factory");
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        System.out.println("kafka listener configs");
        ConcurrentKafkaListenerContainerFactory<String, String> factory
                = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public Receiver receiver() {
        System.out.println("consumer receiver");
        return new Receiver();
    }

    @Bean
    public HttpSolrClientFactoryBean solrServerFactoryBean() {
        HttpSolrClientFactoryBean factory = new HttpSolrClientFactoryBean();
        System.out.println(solrUrl);
        factory.setUrl(solrUrl);
        return factory;
    }

    @Bean
    public SolrTemplate solrTemplate() throws Exception {
        return new SolrTemplate(solrServerFactoryBean().getObject());
    }
}
My receiver class for Kafka:
public class Receiver {

    @Autowired
    private IndexServiceImpl indexServiceImpl;

    private static final Logger LOGGER = LoggerFactory.getLogger(Receiver.class);

    private CountDownLatch latch = new CountDownLatch(1);

    @KafkaListener(topics = "${kafka.topic.helloworld}")
    public void receive(String message) throws IOException {
        System.out.println("started");
        LOGGER.info("received message='{}'", message);
        latch.countDown();
        Assert assertObj = new ObjectMapper().readValue(message, Assert.class);
        indexServiceImpl.save(assertObj);
    }

    public CountDownLatch getLatch() {
        return latch;
    }
}
My service class:
@Component
@Service
public class IndexServiceImpl implements IndexService {

    @Autowired
    MyDocRepository myDocRepository;

    @Override
    public String save(Assert term) {
        myDocRepository.save(term);
        return "success";
    }
}
Can anyone help me solve this issue?
