How to launch a Kafka consumer before the producer - Java

I have a bunch of microservices interacting with a Kafka topic. One of the microservices should consume two integers, sum them, and send the result to the topic. The problem is that I cannot configure the microservice so that the consumer is launched before the producer. My code is as follows:
@SpringBootApplication
public class AdderApplication {

    public static void main(String[] args) {
        ConfigurableApplicationContext context = SpringApplication.run(AdderApplication.class, args);
        AdderConsumer consumer = context.getBean(AdderConsumer.class);
        AdderProducer producer = context.getBean(AdderProducer.class);
        producer.sumTwoIntegers();
    }
}
@Component
public class AdderConsumer extends Controller {

    private CountDownLatch latch = new CountDownLatch(3);

    @KafkaListener(topics = "${kafka.topic.name}")
    public void listenToPartitionWithOffset(@Payload Integer message) {
        if (message != null) {
            list.add(message);   // list and isProduce presumably come from Controller
            isProduce = true;
            System.out.println(list);
        }
    }
}
@Component
public class AdderProducer extends Controller {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${kafka.topic.name}")
    private String topicName;

    public void sumTwoIntegers() {
        // logic
    }

    private void sendMessage(String message) {
        // logic
    }
}
@Configuration
@EnableKafka
public class KafkaConfig {

    @Value("${kafka.boot.server}")
    private String kafkaServer;

    @Value("${kafka.consumer.group.id}")
    private String kafkaGroupId;

    @Bean
    public LoggingErrorHandler errorHandler() {
        return new LoggingErrorHandler();
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerConfig());
    }

    @Bean
    public ProducerFactory<String, String> producerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(config);
    }

    @Bean
    public ConsumerFactory<String, Integer> consumerConfig() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(config);
    }

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, Integer>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Integer> listener = new ConcurrentKafkaListenerContainerFactory<>();
        listener.setConsumerFactory(consumerConfig());
        listener.setErrorHandler(errorHandler());
        return listener;
    }
}
I debugged the code and it calls both the producer and the listener; however, I need the listener to receive both integers first and only then call the producer method.
I would appreciate your thoughts.

I found the solution. I implemented a timer for the producer that is invoked periodically while the application is running.
When it fires for the first time with no data to process, it posts nothing; the consumer then consumes the data, and the second invocation of the producer posts the necessary data to the topic.
The other solution is to call the producer method from the consumer.
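A minimal sketch of that second approach (the two-element threshold and the directly injected AdderProducer are assumptions based on the question, with a local list in place of the one from the Controller base class):

@Component
public class AdderConsumer {

    // collects the incoming operands; two of them trigger the producer
    private final List<Integer> list = new CopyOnWriteArrayList<>();

    @Autowired
    private AdderProducer producer;

    @KafkaListener(topics = "${kafka.topic.name}")
    public void listenToPartitionWithOffset(@Payload Integer message) {
        if (message != null) {
            list.add(message);
            if (list.size() == 2) {
                // both integers received: now it is safe to sum and publish
                producer.sumTwoIntegers();
            }
        }
    }
}

This removes the start-up ordering problem entirely, because the producer is only ever invoked after the consumer has received its input.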

Related

How to test if a method with @KafkaListener is being called

I'm really struggling to write a test that checks whether my Kafka consumer is correctly called when messages are sent to its designated topic.
My consumer:
@Service
@Slf4j
@AllArgsConstructor(onConstructor = @__(@Autowired))
public class ProcessingConsumer {

    private AppService appService;

    @KafkaListener(
            topics = "${topic}",
            containerFactory = "processingConsumerContainerFactory")
    public void listen(ConsumerRecord<Key, Value> message, Acknowledgment ack) {
        try {
            appService.processMessage(message);
            ack.acknowledge();
        } catch (Throwable t) {
            log.error("error while processing message!", t);
        }
    }
}
My consumer config:
@EnableKafka
@Configuration
public class ProcessingConsumerConfig {

    @Value("${spring.kafka.schema-registry-url}")
    private String schemaRegistryUrl;

    private KafkaProperties props;

    public ProcessingConsumerConfig(KafkaProperties kafkaProperties) {
        this.props = kafkaProperties;
    }

    public Map<String, Object> deserializerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
        props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
        return props;
    }

    private KafkaAvroDeserializer getKafkaAvroDeserializer(Boolean isKey) {
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(deserializerConfigs(), isKey);
        return kafkaAvroDeserializer;
    }

    private DefaultKafkaConsumerFactory consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(
                props.buildConsumerProperties(),
                getKafkaAvroDeserializer(true),
                getKafkaAvroDeserializer(false));
    }

    @Bean(name = "processingConsumerContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Key, Value>>
            kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<Key, Value> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.getContainerProperties().setAckOnError(false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        factory.setErrorHandler(new SeekToCurrentErrorHandler());
        return factory;
    }
}
Finally, my (wannabe) test:
@DirtiesContext
public class ProcessingConsumerTest extends BaseIntegrationTest {

    @Autowired private ProcessingProducerFixture processingProducer;

    @Autowired private ProcessingConsumer processingConsumer;

    @org.springframework.beans.factory.annotation.Value("${topic}")
    String topic;

    @Test
    public void consumer_shouldConsumeMessages_whenMessagesAreSent() throws Exception {
        Thread.sleep(1000);
        ProducerRecord<Key, Value> message = new ProducerRecord<>(topic, new Key("b"), new Value("a", "b", "c", "d"));
        processingProducer.send(message);
    }
}
And that's about all I have so far.
I've tried checking whether this approach reaches the consumer, using the debugger and even simple print statements, but execution simply doesn't seem to get there. Also, even if it were called correctly by my test, I have no idea how to actually assert it in the test.
Inject a mock AppService into the listener and verify that its processMessage() was called.
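A sketch of that idea, assuming the same embedded broker and producer fixture as in the question: @MockBean replaces the real AppService with a Mockito mock, and verify(..., timeout(...)) polls until the listener has invoked it, so no Thread.sleep() is needed.

@DirtiesContext
public class ProcessingConsumerTest extends BaseIntegrationTest {

    @Autowired private ProcessingProducerFixture processingProducer;

    // replaces the real AppService bean so the interaction can be verified
    @MockBean private AppService appService;

    @org.springframework.beans.factory.annotation.Value("${topic}")
    String topic;

    @Test
    public void consumer_shouldConsumeMessages_whenMessagesAreSent() throws Exception {
        ProducerRecord<Key, Value> message = new ProducerRecord<>(topic, new Key("b"), new Value("a", "b", "c", "d"));
        processingProducer.send(message);

        // retried until the listener has called the mock or 10 seconds elapse
        verify(appService, timeout(10_000)).processMessage(any());
    }
}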

How to write a unit test for @KafkaListener?

Trying to figure out if I can write a unit test for @KafkaListener using spring-kafka and spring-kafka-test.
My listener class:
public class MyKafkaListener {

    @Autowired
    private MyMessageProcessor myMessageProcessor;

    @KafkaListener(topics = "${kafka.topic.01}", groupId = "SF.CLIENT", clientIdPrefix = "SF.01", containerFactory = "myMessageListenerContainerFactory")
    public void myMessageListener(MyMessage message) {
        myMessageProcessor.process(message);
        log.info("MyMessage processed");
    }
}
My test class:
@RunWith(SpringRunner.class)
@DirtiesContext
@EmbeddedKafka(partitions = 1, topics = {"I1.Topic.json.001"})
@ContextConfiguration(classes = {TestKafkaConfig.class})
public class MyMessageConsumersTest {

    @Autowired
    private MyMessageProcessor myMessageProcessor;

    @Value("${kafka.topic.01}")
    private String TOPIC_01;

    @Autowired
    private KafkaTemplate<String, MyMessage> messageProducer;

    @Test
    public void testSalesforceMessageListener() {
        messageProducer.send(TOPIC_01, "MessageID", new MyMessage());
        verify(myMessageProcessor, times(1)).process(any(MyMessage.class));
    }
}
My test config class:
@Configuration
@EnableKafka
public class TestKafkaConfig {

    @Bean
    public MyMessageProcessor myMessageProcessor() {
        return mock(MyMessageProcessor.class);
    }

    @Bean
    public KafkaEmbedded kafkaEmbedded() {
        return new KafkaEmbedded(1, true, 1, "I1.Topic.json.001");
    }

    // Consumer
    @Bean
    public ConsumerFactory<String, MyMessage> myMessageConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new JsonDeserializer<>(MyMessage.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, MyMessage> myMessageListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, MyMessage> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(myMessageConsumerFactory());
        return factory;
    }

    // Producer
    @Bean
    public ProducerFactory<String, MyMessage> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaMessageSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, MyMessage> messageProducer() {
        return new KafkaTemplate<>(producerFactory());
    }
}
Is there any simple way to make this work?
Or should I test @KafkaListener in some other way? In a unit test, how do I ensure @KafkaListener is invoked when a new message arrives in Kafka?
how do I ensure @KafkaListener is invoked when a new message arrives in Kafka.
Well, testing such functionality is essentially the Framework's responsibility. In your case you should just concentrate on the business logic and unit test exactly your custom code, not the code compiled into the Framework. In addition, there is no good point in testing a @KafkaListener method that just logs incoming messages; it is definitely going to be too hard to find a hook for test-case verification.
On the other hand, I really believe the business logic in your @KafkaListener method is more complicated than what you show. So it might really be better to verify your custom code (e.g. a DB insert, some other service call, etc.) invoked from that method, rather than trying to find a hook exactly for myMessageListener().
What you do with mock(MyMessageProcessor.class) is really a good way to verify the business logic. The only thing wrong in your code is the duplication for the embedded Kafka: you use the annotation and you also declare a @Bean in the config, so you should think about removing one of them. Although it isn't clear where your production code is, it really should be free of the embedded Kafka. Otherwise, if everything is in the test scope, I don't see any problems with your consumer and producer factory configuration. You definitely have the minimal possible config for the @KafkaListener and KafkaTemplate. You only need to remove the @EmbeddedKafka so that the broker is not started twice.
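Following that last suggestion, the de-duplicated test class might look like this (a sketch: the @EmbeddedKafka annotation is dropped, so the KafkaEmbedded @Bean in TestKafkaConfig remains the only broker; the Mockito timeout is an added assumption to give the asynchronous listener time to consume):

@RunWith(SpringRunner.class)
@DirtiesContext
@ContextConfiguration(classes = {TestKafkaConfig.class})
public class MyMessageConsumersTest {

    // this is the mock declared in TestKafkaConfig
    @Autowired
    private MyMessageProcessor myMessageProcessor;

    @Value("${kafka.topic.01}")
    private String TOPIC_01;

    @Autowired
    private KafkaTemplate<String, MyMessage> messageProducer;

    @Test
    public void testSalesforceMessageListener() {
        messageProducer.send(TOPIC_01, "MessageID", new MyMessage());
        // retried until the listener has processed the record or 5 seconds elapse
        verify(myMessageProcessor, timeout(5000).times(1)).process(any(MyMessage.class));
    }
}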
You can wrap the listener in your test case.
Given
@SpringBootApplication
public class So52783066Application {

    public static void main(String[] args) {
        SpringApplication.run(So52783066Application.class, args);
    }

    @KafkaListener(id = "so52783066", topics = "so52783066")
    public void listen(String in) {
        System.out.println(in);
    }
}
then
@RunWith(SpringRunner.class)
@SpringBootTest
public class So52783066ApplicationTests {

    @ClassRule
    public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, "so52783066");

    @Autowired
    private KafkaListenerEndpointRegistry registry;

    @Autowired
    private KafkaTemplate<String, String> template;

    @Before
    public void setup() {
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
    }

    @Test
    public void test() throws Exception {
        ConcurrentMessageListenerContainer<?, ?> container = (ConcurrentMessageListenerContainer<?, ?>) registry
                .getListenerContainer("so52783066");
        container.stop();
        @SuppressWarnings("unchecked")
        AcknowledgingConsumerAwareMessageListener<String, String> messageListener = (AcknowledgingConsumerAwareMessageListener<String, String>) container
                .getContainerProperties().getMessageListener();
        CountDownLatch latch = new CountDownLatch(1);
        container.getContainerProperties()
                .setMessageListener(new AcknowledgingConsumerAwareMessageListener<String, String>() {

                    @Override
                    public void onMessage(ConsumerRecord<String, String> data, Acknowledgment acknowledgment,
                            Consumer<?, ?> consumer) {
                        messageListener.onMessage(data, acknowledgment, consumer);
                        latch.countDown();
                    }

                });
        container.start();
        template.send("so52783066", "foo");
        assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
    }
}
Here is my working solution for the Consumer, based on your code. Thank you :-)
The Configuration is the following:
@TestConfiguration
@EnableKafka
@Profile("kafka_test")
public class KafkaTestConfig {

    private static Logger log = LoggerFactory.getLogger(KafkaTestConfig.class);

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Bean
    @Primary
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "group-id");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        log.info("Consumer TEST config = {}", props);
        return props;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        log.info("Producer TEST config = {}", props);
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<String>());
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        DefaultKafkaProducerFactory<String, String> pf = new DefaultKafkaProducerFactory<>(producerConfigs());
        return pf;
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            ConsumerFactory<String, String> kafkaConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.getContainerProperties().setAckOnError(false);
        factory.setConcurrency(2);
        return factory;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
        return kafkaTemplate;
    }

    @Bean
    public KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry() {
        KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry = new KafkaListenerEndpointRegistry();
        return kafkaListenerEndpointRegistry;
    }
}
Place all the beans you need to include in the test in a different class:
@TestConfiguration
@Profile("kafka_test")
@EnableKafka
public class KafkaBeansConfig {

    @Bean
    public MyProducer myProducer() {
        return new MyProducer();
    }

    // more beans
}
I created a BaseKafkaConsumerTest class so it can be reused:
@ExtendWith(SpringExtension.class)
@TestPropertySource(properties = { "spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}" })
@TestInstance(Lifecycle.PER_CLASS)
@DirtiesContext
@ContextConfiguration(classes = KafkaTestConfig.class)
@ActiveProfiles("kafka_test")
public class BaseKafkaConsumerTest {

    @Autowired
    protected EmbeddedKafkaBroker embeddedKafka;

    @Value("${spring.embedded.kafka.brokers}")
    private String brokerAddresses;

    @Autowired
    protected KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;

    @Autowired
    protected KafkaTemplate<String, String> senderTemplate;

    public void setUp() {
        embeddedKafka.brokerProperty("controlled.shutdown.enable", true);
        for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
                .getListenerContainers()) {
            System.err.println(messageListenerContainer.getContainerProperties().toString());
            ContainerTestUtils.waitForAssignment(messageListenerContainer, embeddedKafka.getPartitionsPerTopic());
        }
    }

    @AfterAll
    public void tearDown() {
        for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
                .getListenerContainers()) {
            messageListenerContainer.stop();
        }
        embeddedKafka.getKafkaServers().forEach(b -> b.shutdown());
        embeddedKafka.getKafkaServers().forEach(b -> b.awaitShutdown());
    }
}
Extend the base class to test your consumer:
@EmbeddedKafka(topics = MyConsumer.TOPIC_NAME)
@Import(KafkaBeansConfig.class)
public class MyKafkaConsumerTest extends BaseKafkaConsumerTest {

    private static Logger log = LoggerFactory.getLogger(MyKafkaConsumerTest.class);

    @Autowired
    private MyConsumer myConsumer;

    // mocks with @MockBean

    @Configuration
    @ComponentScan({ "com.myfirm.kafka" })
    static class KafkaLocalTestConfig {
    }

    @BeforeAll
    public void setUp() {
        super.setUp();
    }

    @Test
    public void testMessageIsReceived() throws Exception {
        // mocks
        String jsonPayload = "{\"id\":\"12345\",\"cookieDomain\":\"helloworld\"}";
        ListenableFuture<SendResult<String, String>> future =
                senderTemplate.send(MyConsumer.TOPIC_NAME, jsonPayload);
        Thread.sleep(10000);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {

            @Override
            public void onSuccess(SendResult<String, String> result) {
                log.info("successfully sent message='{}' with offset={}", jsonPayload,
                        result.getRecordMetadata().offset());
            }

            @Override
            public void onFailure(Throwable ex) {
                log.error("unable to send message='{}'", jsonPayload, ex);
            }
        });
        Mockito.verify(myService, Mockito.times(1))
                .update(Mockito.any(MyDetails.class));
    }
}
As I read in other posts, don't test the business logic this way; just test that the calls are made.
If you want to write integration tests using EmbeddedKafka, then you can do something like this.
Assume we have some KafkaListener, which accepts a RequestDto as a Payload.
In your test class you should create a TestConfiguration in order to create the producer beans and to autowire KafkaTemplate into your test. Also notice that instead of autowiring the consumer, we inject a consumer SpyBean.
In the someTest method we create a latch and set up the consumer listener method so that when it is called, the latch is opened and the assertions take place only after the listener has received the Payload.
Also notice the any() ?: RequestDto() line. You should use the elvis operator with any() only if you are using Mockito's any() with non-null Kotlin method arguments, because any() first returns null.
@EnableKafka
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@EmbeddedKafka(partitions = 10, brokerProperties = ["listeners=PLAINTEXT://localhost:9092", "port=9092"])
class KafkaIgniteApplicationTests {

    @SpyBean
    private lateinit var consumer: Consumer

    @TestConfiguration
    class Config {

        @Value("\${spring.kafka.consumer.bootstrap-servers}")
        private lateinit var servers: String

        fun producerConfig(): Map<String, Any> {
            val props = mutableMapOf<String, Any>()
            props[ProducerConfig.BOOTSTRAP_SERVERS_CONFIG] = servers
            props[ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java
            props[ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG] = StringSerializer::class.java
            return props
        }

        @Bean
        fun producerFactory(): ProducerFactory<String, String> {
            return DefaultKafkaProducerFactory(producerConfig())
        }

        @Bean
        fun kafkaTemplate(producerFactory: ProducerFactory<String, String>): KafkaTemplate<String, String> {
            return KafkaTemplate(producerFactory)
        }
    }

    @Autowired
    private lateinit var kafkaTemplate: KafkaTemplate<String, String>

    @Test
    fun someTest() {
        val lock = CountDownLatch(1)
        `when`(consumer.receive(any() ?: RequestDto())).thenAnswer {
            it.callRealMethod()
            lock.countDown()
        }
        val request = "{\"value\":\"1\"}"
        kafkaTemplate.send(TOPIC, request)
        lock.await(1000, TimeUnit.MILLISECONDS)
        verify(consumer).receive(RequestDto().apply { value = BigDecimal.ONE })
    }
}
In a unit test, how do I ensure @KafkaListener is invoked when a new message arrives in Kafka?
Instead of using the Awaitility or CountDownLatch approach, an easier way is to make the actual @KafkaListener bean a Mockito spy using @SpyBean. A spy basically allows you to record all interactions made on an actual bean instance so that you can verify them later. Together with Mockito's timeout verification feature, you can ensure that the verification is retried until a certain timeout after the producer sends the message.
Something like:
@SpringBootTest(properties = {"spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}"})
@EmbeddedKafka(topics = {"fooTopic"})
public class MyMessageConsumersTest {

    @SpyBean
    private MyKafkaListener myKafkaListener;

    @Captor
    private ArgumentCaptor<MyMessage> myMessageCaptor;

    @Test
    public void test() {
        // create a KafkaTemplate to send some message to the topic...
        verify(myKafkaListener, timeout(5000)).myMessageListener(myMessageCaptor.capture());
        // assert the KafkaListener is configured correctly such that it is invoked with the expected parameter
        assertThat(myMessageCaptor.getValue()).isEqualTo(xxxxx);
    }
}
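The elided sending part might look something like this (a sketch only; it assumes the test can autowire the EmbeddedKafkaBroker and that MyMessage is JSON-serializable):

@Autowired
private EmbeddedKafkaBroker embeddedKafka;

private void sendTestMessage() {
    // build a throwaway producer against the embedded broker and publish one
    // record to "fooTopic", the topic named in the @EmbeddedKafka annotation above
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafka.getBrokersAsString());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    KafkaTemplate<String, MyMessage> template =
            new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(props));
    template.send("fooTopic", new MyMessage());
}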

Exception when enabling Solr repositories in a Solr-Kafka integration

I am trying to connect to Solr and save data read from a Kafka topic. When I try to use @EnableSolrRepositories I get the following exception:
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myDocRepository': Could not resolve matching constructor (hint: specify index/type/name arguments for simple parameters to avoid type ambiguities)
EDITED:
This is my repository:
@Component
@Repository
public interface MyDocRepository extends SolrCrudRepository<Assert, String> {
}
My config class contains both Kafka and Solr configs:
@Configuration
@EnableKafka
@Component
@EnableSolrRepositories(basePackageClasses = MyDocRepository.class)
public class ReceiverConfig {

    @Value("${kafka.servers.bootstrap}")
    private String bootstrapServers;

    @Value("${solr.server.url}")
    private String solrUrl;

    @Bean
    public Map<String, Object> consumerConfigs() {
        System.out.println("consumer configs : ");
        Map<String, Object> props = new HashMap<>();
        // list of host:port pairs used for establishing the initial connections
        // to the Kafka cluster
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // consumer groups allow a pool of processes to divide the work of
        // consuming and processing records
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "helloworld");
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        System.out.println("consumer factory");
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        System.out.println("kafka listener configs");
        ConcurrentKafkaListenerContainerFactory<String, String> factory
                = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public Receiver receiver() {
        System.out.println("consumer receiver");
        return new Receiver();
    }

    @Bean
    public HttpSolrClientFactoryBean solrServerFactoryBean() {
        HttpSolrClientFactoryBean factory = new HttpSolrClientFactoryBean();
        System.out.println(solrUrl);
        factory.setUrl(solrUrl);
        return factory;
    }

    @Bean
    public SolrTemplate solrTemplate() throws Exception {
        return new SolrTemplate(solrServerFactoryBean().getObject());
    }
}
My receiver class for Kafka:
public class Receiver {

    @Autowired
    private IndexServiceImpl indexServiceImpl;

    private static final Logger LOGGER = LoggerFactory.getLogger(Receiver.class);

    private CountDownLatch latch = new CountDownLatch(1);

    @KafkaListener(topics = "${kafka.topic.helloworld}")
    public void receive(String message) throws IOException {
        System.out.println("started");
        LOGGER.info("received message='{}'", message);
        latch.countDown();
        Assert assertObj = new ObjectMapper().readValue(message, Assert.class);
        indexServiceImpl.save(assertObj);
    }

    public CountDownLatch getLatch() {
        return latch;
    }
}
My service class:
@Component
@Service
public class IndexServiceImpl implements IndexService {

    @Autowired
    MyDocRepository myDocRepository;

    @Override
    public String save(Assert term) {
        myDocRepository.save(term);
        return "success";
    }
}
Can anyone help me solve this issue?

Spring Kafka: Multiple Listeners for different objects within an ApplicationContext

Can I please check with the community what the best way is to listen to multiple topics, with each topic containing a message of a different class?
I've been playing around with Spring Kafka for the past couple of days. My thought process so far:
Because you need to pass your deserializer into DefaultKafkaConsumerFactory when initializing a KafkaListenerContainerFactory, this seems to indicate that if I need multiple containers, each deserializing a message of a different type, I will not be able to use the @EnableKafka and @KafkaListener annotations.
This leads me to think that the only way to do so would be to instantiate multiple KafkaMessageListenerContainers.
And given that a KafkaMessageListenerContainer is single-threaded and I need to listen to multiple topics at the same time, I really should be using multiple ConcurrentMessageListenerContainers.
Would I be on the right track here? Is there a better way to do this?
Thanks!
Here is a very simple example.
// -----------------------------------------------
// Sender
// -----------------------------------------------
@Configuration
public class SenderConfig {

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        ......
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return props;
    }

    @Bean
    public ProducerFactory<String, Class1> producerFactory1() {
        return new DefaultKafkaProducerFactory<String, Class1>(producerConfigs());
    }

    @Bean
    public KafkaTemplate<String, Class1> kafkaTemplate1() {
        return new KafkaTemplate<>(producerFactory1());
    }

    @Bean
    public Sender1 sender1() {
        return new Sender1();
    }

    // -------- send the second class --------
    @Bean
    public ProducerFactory<String, Class2> producerFactory2() {
        return new DefaultKafkaProducerFactory<String, Class2>(producerConfigs());
    }

    @Bean
    public KafkaTemplate<String, Class2> kafkaTemplate2() {
        return new KafkaTemplate<>(producerFactory2());
    }

    @Bean
    public Sender2 sender2() {
        return new Sender2();
    }
}

public class Sender1 {

    @Autowired
    private KafkaTemplate<String, Class1> kafkaTemplate1;

    public void send(String topic, Class1 c1) {
        kafkaTemplate1.send(topic, c1);
    }
}

public class Sender2 {

    @Autowired
    private KafkaTemplate<String, Class2> kafkaTemplate2;

    public void send(String topic, Class2 c2) {
        kafkaTemplate2.send(topic, c2);
    }
}
// -----------------------------------------------
// Receiver
// -----------------------------------------------
@Configuration
@EnableKafka
public class ReceiverConfig {

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        ......
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        return props;
    }

    @Bean
    public ConsumerFactory<String, Class1> consumerFactory1() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<>(Class1.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Class1> kafkaListenerContainerFactory1() {
        ConcurrentKafkaListenerContainerFactory<String, Class1> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory1());
        return factory;
    }

    @Bean
    public Receiver1 receiver1() {
        return new Receiver1();
    }

    // -------- add the second listener --------
    @Bean
    public ConsumerFactory<String, Class2> consumerFactory2() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new JsonDeserializer<>(Class2.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Class2> kafkaListenerContainerFactory2() {
        ConcurrentKafkaListenerContainerFactory<String, Class2> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory2());
        return factory;
    }

    @Bean
    public Receiver2 receiver2() {
        return new Receiver2();
    }
}

public class Receiver1 {

    @KafkaListener(id = "listener1", topics = "topic1", containerFactory = "kafkaListenerContainerFactory1")
    public void receive(Class1 c1) {
        LOGGER.info("Received c1");
    }
}

public class Receiver2 {

    @KafkaListener(id = "listener2", topics = "topic2", containerFactory = "kafkaListenerContainerFactory2")
    public void receive(Class2 c2) {
        LOGGER.info("Received c2");
    }
}
You can use the annotations; you would just need to use a different listener container factory for each.
The framework will create a listener container for each annotation.
You can also listen to multiple topics in a single-threaded container, but they would be processed, er, on a single thread.
Take a look at the code from my SpringOne Platform talk last year; you might want to look at app6, which shows how to use a MessageConverter instead of a deserializer, which might help simplify your configuration.
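To illustrate that converter-based approach, a sketch (assumed configuration, not the code from the talk): both listeners share one container factory whose consumer deserializes plain Strings, and a StringJsonMessageConverter maps the JSON to the parameter type declared on each @KafkaListener method.

@Configuration
@EnableKafka
public class ConverterConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "converter-demo");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // conversion to the listener's parameter type happens here, not in the consumer
        factory.setMessageConverter(new StringJsonMessageConverter());
        return factory;
    }
}

public class Receivers {

    @KafkaListener(topics = "topic1")
    public void receive1(Class1 c1) { /* ... */ }

    @KafkaListener(topics = "topic2")
    public void receive2(Class2 c2) { /* ... */ }
}

With this arrangement a single factory serves listeners for any number of payload types.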
I would like to suggest the code below for your scenario:
@Configuration
@EnableKafka
// renamed from ConsumerConfig to avoid clashing with org.apache.kafka.clients.consumer.ConsumerConfig used below
public class KafkaConsumerConfig {

    @Value("${kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Value("${kafka.group-id}")
    private String groupId;

    /**
     * Configuration of Consumer properties.
     *
     * @return
     */
    //@Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return props;
    }

    //@Bean
    public ConsumerFactory<String, ClassA> consumerFactory1() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new ClassA());
    }

    /**
     * Kafka Listener Container Factory.
     * @return
     */
    @Bean("kafkaListenerContainerFactory1")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, ClassA>> kafkaListenerContainerFactory1() {
        ConcurrentKafkaListenerContainerFactory<String, ClassA> factory;
        factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory1());
        return factory;
    }

    //@Bean
    public ConsumerFactory<String, ClassB> consumerFactory2() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
                new ClassB());
    }

    /**
     * Kafka Listener Container Factory.
     * @return
     */
    @Bean("kafkaListenerContainerFactory2")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, ClassB>> kafkaListenerContainerFactory2() {
        ConcurrentKafkaListenerContainerFactory<String, ClassB> factory;
        factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory2());
        return factory;
    }

    @Bean
    public ReceiverClass receiver() {
        return new ReceiverClass();
    }

    class ReceiverClass {

        @KafkaListener(topics = "topic1", group = "group-id-test",
                containerFactory = "kafkaListenerContainerFactory1")
        public void receiveTopic1(ClassA a) {
            System.out.println("ReceiverClass.receive() ClassA : " + a);
        }

        @KafkaListener(topics = "topic2", group = "group-id-test",
                containerFactory = "kafkaListenerContainerFactory2")
        public void receiveTopic2(ClassB b) {
            System.out.println("ReceiverClass.receive() ClassB : " + b);
        }
    }

    class ClassB implements Deserializer {

        @Override
        public void configure(Map configs, boolean isKey) {
            // TODO Auto-generated method stub
        }

        @Override
        public Object deserialize(String topic, byte[] data) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public void close() {
            // TODO Auto-generated method stub
        }
    }

    class ClassA implements Deserializer {

        @Override
        public void configure(Map configs, boolean isKey) {
            // TODO Auto-generated method stub
        }

        @Override
        public Object deserialize(String topic, byte[] data) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public void close() {
            // TODO Auto-generated method stub
        }
    }
}

How to implement a Kafka consumer in a Spring MVC web app (using Spring Boot)

I would like to create a consumer inside a Spring MVC web app. Basically, I'd like the web app to listen to some topics on Kafka and take action based on the received messages.
All the examples I've seen so far use either a standalone app with an infinite loop (plain Java) or a unit test (using Spring):
@Autowired
private Listener listener;

@Autowired
private KafkaTemplate<Integer, String> template;

@Test
public void testSimple() throws Exception {
    template.send("annotated1", 0, "foo");
    template.flush();
    assertTrue(this.listener.latch1.await(10, TimeUnit.SECONDS));
}

@Configuration
@EnableKafka
public class Config {

    @Bean
    ConcurrentKafkaListenerContainerFactory<Integer, String>
            kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<Integer, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ConsumerFactory<Integer, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafka.getBrokersAsString());
        ...
        return props;
    }

    @Bean
    public Listener listener() {
        return new Listener();
    }

    @Bean
    public ProducerFactory<Integer, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafka.getBrokersAsString());
        ...
        return props;
    }

    @Bean
    public KafkaTemplate<Integer, String> kafkaTemplate() {
        return new KafkaTemplate<Integer, String>(producerFactory());
    }
}

public class Listener {

    private final CountDownLatch latch1 = new CountDownLatch(1);

    @KafkaListener(id = "foo", topics = "annotated1")
    public void listen1(String foo) {
        this.latch1.countDown();
    }
}
What would be the best place to create the listener? Should I put it here:
@SpringBootApplication
public class Application {

    @Autowired
    private Listener listener;

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
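One straightforward arrangement (a sketch, assuming Spring Boot's auto-configured listener container factory is in play): the listener does not need to be referenced from the application class at all. Declaring it in any Spring-managed bean, such as a dedicated @Service, is enough; the container starts with the application context and keeps polling for the lifetime of the web app.

@Service
public class TopicListener {

    @KafkaListener(id = "webapp", topics = "annotated1")
    public void listen(String message) {
        // react to the message, e.g. call a service or update application state
        System.out.println("received: " + message);
    }
}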
