I am trying to build a Java Spring Boot application that posts messages to and reads messages from Confluent Cloud Kafka.
I followed an article for publishing a Kafka message to Confluent Cloud, and it works.
Below is the implementation:
KafkaController.java
package com.seroter.confluentboot.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.seroter.confluentboot.dto.Product;
import com.seroter.confluentboot.engine.Producer;
@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {
private final Producer producer;
private final com.seroter.confluentboot.engine.Consumer consumer;
@Autowired
KafkaController(Producer producer,com.seroter.confluentboot.engine.Consumer consumer) {
this.producer = producer;
this.consumer=consumer;
}
@PostMapping(value = "/publish")
public void sendMessageToKafkaTopic(@RequestParam("message") String message) {
this.producer.sendMessage(message);
}
@PostMapping(value="/publishJson")
public ResponseEntity<Product> publishJsonMessage(@RequestBody Product product) {
producer.sendJsonMessage(product);
ResponseEntity<Product> responseEntity=new ResponseEntity<>(product,HttpStatus.CREATED);
return responseEntity;
}
}
Product.java
package com.seroter.confluentboot.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
@JsonPropertyOrder(value = {"product_id","product_name","quantity","price"})
public class Product {
@JsonProperty(value = "product_id")
private int productId;
@JsonProperty(value="product_name")
private String productName;
private int quantity;
private double price;
}
Producer.java
package com.seroter.confluentboot.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.stereotype.Service;
import com.seroter.confluentboot.dto.Product;
@Service
@EnableBinding(Source.class)
public class Producer {
private static final Logger logger = LoggerFactory.getLogger(Producer.class);
private static final String TOPIC = "users";
@Autowired
private Source source;
public void sendMessage(String message) {
logger.info(String.format("#### -> Producing message -> %s", message));
this.source.output().send(new GenericMessage<>(message));
}
public void sendJsonMessage(Product product)
{
logger.info(String.format("#### -> Producing message -> %s",product.toString()));
this.source.output().send(new GenericMessage<>(product));
}
}
ConfluentBootApplication.java
package com.seroter.confluentboot;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.http.ResponseEntity;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.seroter.confluentboot.dto.Product;
@SpringBootApplication
@EnableBinding(Source.class)
@RestController
@RequestMapping(value = "/confluent")
public class ConfluentBootApplication {
@Autowired
private com.seroter.confluentboot.engine.Consumer consumer;
public static void main(String[] args) {
SpringApplication.run(ConfluentBootApplication.class, args);
}
}
application.properties
spring.cloud.stream.kafka.binder.brokers=pkc-epwny.eastus.azure.confluent.cloud:9092
spring.cloud.stream.bindings.output.destination=test
spring.cloud.stream.kafka.binder.configuration.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="user" password="password";
spring.cloud.stream.kafka.binder.configuration.sasl.mechanism=PLAIN
spring.cloud.stream.kafka.binder.configuration.security.protocol=SASL_SSL
server.port=9000
It works, and I could verify the published messages in Confluent Cloud.
Now I want to build a Spring Boot consumer REST endpoint. How do I do it?
Update:
ConfluentConsumer.java
package com.seroter.confluentboot.controller;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Sink;
import com.seroter.confluentboot.dto.Product;
//@RestController
@EnableBinding(Sink.class)
public class ConfluentConsumer {
@StreamListener(Sink.INPUT)
public void consumeMessage(Product product)
{
System.out.println("******************************");
System.out.println("============= "+product.getProductId()+" ================");
System.out.println("******************************");
}
}
Consumer.java
package com.seroter.confluentboot.engine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Service;
@Service
@PropertySource("classpath:application.properties")
public class Consumer {
private final Logger logger = LoggerFactory.getLogger(Consumer.class);
}
I believe what you are trying to do here is pick the latest message from the Kafka consumer via a REST endpoint, i.e. you want to manually poll the Kafka topic. Publishing a message via a REST endpoint is logical, but consuming messages through an endpoint doesn't sound like a good idea. If you want queue behavior, you should use RabbitMQ instead of Kafka.
But if you still want to use Kafka and poll messages manually, you can use one of the two approaches below.
Approach 1: Create a ConsumerFactory and get a Consumer from the factory, and then poll Kafka using a Consumer
@Configuration
class KafkaConsumerConfig {
private static final String TOPIC_NAME = "test";
private final String userName = "username";
private final String password = "password";
@Bean
public ConsumerFactory<String, String> consumerFactory() {
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "pkc-epwny.eastus.azure.confluent.cloud:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-gp-1");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
// The JAAS config needs the credentials quoted and a trailing semicolon
props.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"" + userName + "\" password=\"" + password + "\";");
return new DefaultKafkaConsumerFactory<>(props);
}
@Bean
public Consumer<String, String> createConsumer(ConsumerFactory<String, String> consumerFactory) {
Consumer<String, String> consumer = consumerFactory.createConsumer("consumer-group-1", "client-1");
consumer.subscribe(List.of(TOPIC_NAME));
return consumer;
}
}
You can read the topic name, group-id, bootstrap servers, SSL configs, etc. from the application.properties
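For example, a minimal sketch of the same factory with the values injected from properties instead of hardcoded (the app.kafka.* key below is just a placeholder, use whatever keys you already have in your application.properties):
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
class KafkaConsumerPropertiesConfig {

    // Reuse the keys already present in your application.properties
    @Value("${spring.cloud.stream.kafka.binder.brokers}")
    private String bootstrapServers;

    // Placeholder key with a default; rename it to whatever you prefer
    @Value("${app.kafka.consumer.group-id:consumer-gp-1}")
    private String groupId;

    @Value("${spring.cloud.stream.kafka.binder.configuration.sasl.jaas.config}")
    private String saslJaasConfig;

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        props.put(SaslConfigs.SASL_JAAS_CONFIG, saslJaasConfig);
        return new DefaultKafkaConsumerFactory<>(props);
    }
}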
Now you can consume messages by injecting the consumer in the RestController.
@RestController
class ConsumerController {
private final Consumer<String, String> consumer;
@Autowired
ConsumerController(Consumer<String, String> consumer) {
this.consumer = consumer;
}
@GetMapping("retrieveMessage")
public String getMessage() {
// Kafka might return more than one record per poll, so be careful
ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
if (!consumerRecords.isEmpty()) {
Iterator<ConsumerRecord<String, String>> iterator = consumerRecords.iterator();
String value = iterator.next().value();
consumer.commitSync();
return value;
} else {
return "no message";
}
}
}
Approach 2: store the messages in an in-memory queue and then poll that queue.
First, bind the Sink input to the topic in application.properties:
spring.cloud.stream.bindings.input.destination=test
Then store the messages in a queue and retrieve them via a REST endpoint:
@RestController
@EnableBinding(Sink.class)
class ConsumerController {
private final Queue<String> queue;
ConsumerController() {
this.queue = new ConcurrentLinkedQueue<>();
}
@StreamListener(target = Sink.INPUT)
public void consume(String message) {
this.queue.add(message);
}
@GetMapping("getMessage")
public String retrieveMessage() {
return this.queue.poll();
}
}
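One note on the binding above: without a group, Spring Cloud Stream creates an anonymous consumer group that starts from the latest offset on every restart. If you want the consumer to be durable, give the binding a group (the group name below is just an example):
spring.cloud.stream.bindings.input.group=rest-consumer-group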
Cons: you'll lose all the in-memory messages if your application restarts. Thus, storing the messages in a distributed cache such as Redis would be a better solution.
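If you go that route, a minimal sketch of swapping the in-memory queue for a Redis list could look like this (the Redis key name is illustrative; it assumes spring-boot-starter-data-redis is on the classpath so StringRedisTemplate is auto-configured):
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Sink;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@EnableBinding(Sink.class)
class RedisBackedConsumerController {

    private static final String KEY = "kafka-messages"; // illustrative Redis list key

    private final StringRedisTemplate redisTemplate;

    RedisBackedConsumerController(StringRedisTemplate redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    @StreamListener(target = Sink.INPUT)
    public void consume(String message) {
        // push each incoming Kafka message onto a Redis list so it survives restarts
        redisTemplate.opsForList().leftPush(KEY, message);
    }

    @GetMapping("getMessage")
    public String retrieveMessage() {
        // pop the oldest message; returns null when the list is empty
        return redisTemplate.opsForList().rightPop(KEY);
    }
}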
Related
I have this scenario: I have many queues in Azure Service Bus. The implementation below works, but it is not flexible, since I would need to replicate it for each queue. I would like to determine the queue name dynamically, maybe as a parameter of the send() method, along with the OUTPUT_CHANNEL for @ServiceActivator and @MessagingGateway. Is that possible?
import com.azure.spring.cloud.service.servicebus.properties.ServiceBusEntityType;
import com.azure.spring.integration.core.handler.DefaultMessageHandler;
import com.azure.spring.messaging.servicebus.core.ServiceBusTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.annotation.MessagingGateway;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.messaging.MessageHandler;
import org.springframework.stereotype.Component;
@Component
public class TestIntegration {
private static final String OUTPUT_CHANNEL = "output";
private static final String QUEUE_NAME = "myQueue";
@Autowired
private QueueOutboundGateway messagingGateway;
public void send(String message) {
this.messagingGateway.send(message);
}
@Bean
@ServiceActivator(inputChannel = OUTPUT_CHANNEL)
public MessageHandler queueMessageSender(ServiceBusTemplate serviceBusTemplate) {
serviceBusTemplate.setDefaultEntityType(ServiceBusEntityType.QUEUE);
return new DefaultMessageHandler(QUEUE_NAME, serviceBusTemplate);
}
@MessagingGateway(defaultRequestChannel = OUTPUT_CHANNEL)
public interface QueueOutboundGateway {
void send(String text);
}
}
The com.azure.spring.integration.core.handler.DefaultMessageHandler supports dynamic destination resolution from message headers:
private String toDestination(Message<?> message) {
if (message.getHeaders().containsKey(AzureHeaders.NAME)) {
return message.getHeaders().get(AzureHeaders.NAME, String.class);
}
return this.destination;
}
So, what you need is a @Header(name = AzureHeaders.NAME) String destination argument on your gateway's send() method. There is no need to make the OUTPUT_CHANNEL dynamic: one gateway and one service activator for that DefaultMessageHandler are enough. You call send() with the payload and the target destination as parameters.
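A minimal sketch of that change (the AzureHeaders import is an assumption; adjust it to wherever the class lives in your Spring Cloud Azure version, and the fallback queue name is only used when no header is supplied):
import com.azure.spring.cloud.service.servicebus.properties.ServiceBusEntityType;
import com.azure.spring.integration.core.handler.DefaultMessageHandler;
import com.azure.spring.messaging.AzureHeaders; // assumption: adjust to your Spring Cloud Azure version
import com.azure.spring.messaging.servicebus.core.ServiceBusTemplate;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.annotation.MessagingGateway;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.stereotype.Component;

@Component
public class DynamicQueueIntegration {

    private static final String OUTPUT_CHANNEL = "output";
    private static final String FALLBACK_QUEUE = "myQueue"; // used only when no header is present

    // Single handler; the actual queue is resolved per message from the AzureHeaders.NAME header
    @Bean
    @ServiceActivator(inputChannel = OUTPUT_CHANNEL)
    public MessageHandler queueMessageSender(ServiceBusTemplate serviceBusTemplate) {
        serviceBusTemplate.setDefaultEntityType(ServiceBusEntityType.QUEUE);
        return new DefaultMessageHandler(FALLBACK_QUEUE, serviceBusTemplate);
    }

    // Single gateway; callers pass the target queue name along with the payload
    @MessagingGateway(defaultRequestChannel = OUTPUT_CHANNEL)
    public interface QueueOutboundGateway {
        void send(String payload, @Header(AzureHeaders.NAME) String queueName);
    }
}
Calling messagingGateway.send(message, "some-other-queue") then routes each message to the queue named in the header, while the handler's constructor argument only acts as the fallback destination.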
I'm using Redis in my Spring Boot REST application to store a cache. The problem I'm facing is that once data is stored in Redis, my API only hits Redis, not the database. I've added a timeout property, but it didn't work. I've tried using CacheManager to get the cache, CacheEvict to clear it, and then CachePut to put the data again, but that didn't work either. These are the things I've tried so far. I want my Redis cache to refresh after a given time set by me. Any advice on this? Here is my code below:
package com.dg.repo;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import com.dg.entity.FlightEntity;
public interface FlightRepo extends JpaRepository<FlightEntity, String> {
@Query(value="select distinct txtFlightName\r\n"
+ "from {test-schema}flights", nativeQuery = true)
List<String> getAllFlights();
@Query(value="select distinct txtFlightName from {test-schema}flights \r\n"
+ "where txtFlightName LIKE %:flightname%",nativeQuery = true)
List<String> getListofFlights(@Param("flightname")String flightname);
@Query(value="select distinct txtBookingCode,txtFlightName from {test-schema}flights \r\n"
+ "where txtFlightName LIKE %:flightname%",nativeQuery = true)
List<FlightEntity> getFlightEntity(@Param("flightname")String flightname);
}
package com.dg.repo;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery;
import com.dg.entity.FlightEntity;
public abstract class FlightRepoImpl implements FlightRepo {
RedisTemplate template;
HashOperations hashOperations;
public FlightRepoImpl(RedisTemplate template, HashOperations hashOperations) {
super();
this.template = template;
this.hashOperations = template.opsForHash();
}
@Override
public List<String> getAllFlights() {
return hashOperations.values("FlightModel");
}
@Override
public List<String> getListofFlights(String flightname) {
return (List<String>) hashOperations.get("FlightModel", flightname);
}
}
package com.dg.service;
import org.modelmapper.ModelMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import com.dg.model.FlightModel;
import com.dg.repo.FlightRepo;
public class FlightService {
@Autowired
FlightRepo flightRepo;
@Autowired
ModelMapper modelMapper;
@Scheduled(fixedRate = 50000)
@Caching(evict = {@CacheEvict(value="getFlightList", key="#flightname")})
public FlightModel getFlightByFlightName(String flightName)
{
package com.dg.model;
import java.io.Serializable;
import java.util.List;
public class FlightModel implements Serializable{
private List<Object> listofflightname;
public List<Object> getListofflightname() {
return listofflightname;
}
public void setListofflightname(List<Object> listofflightname) {
this.listofflightname = listofflightname;
}
}
package com.dg.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
@Entity
public class FlightEntity {
@Id
@Column(name="txtBookingCode")
private String bookingCode;
@Column(name="txtFlightName")
private String flightname;
public String getBookingCode() {
return bookingCode;
}
public void setBookingCode(String bookingCode) {
this.bookingCode = bookingCode;
}
public String getFlightname() {
return flightname;
}
public void setFlightname(String flightname) {
this.flightname = flightname;
}
}
package com.dg.config;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.data.redis.connection.RedisClusterConfiguration;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.repository.configuration.EnableRedisRepositories;
import redis.clients.jedis.JedisPoolConfig;
@Configuration
@EnableRedisRepositories
@Profile("test")
public class RedisConfig {
@Value("${spring.redis.cluster.nodes}")
private String nodesProperty;
@Bean
public JedisConnectionFactory jedisConnectionFactory()
{
JedisPoolConfig poolConfig = new JedisPoolConfig();
poolConfig.setMinIdle(2);
poolConfig.setMaxIdle(5);
poolConfig.setMaxTotal(20);
poolConfig.setEvictorShutdownTimeoutMillis(10000);
String [] nodesArray=nodesProperty.split(",");
List<String> nodes = new ArrayList<String>(Arrays.asList(nodesArray));
RedisClusterConfiguration configuration=new RedisClusterConfiguration(nodes);
configuration.setMaxRedirects(100);
JedisConnectionFactory connectionFactory = new JedisConnectionFactory(configuration);
connectionFactory.setPoolConfig(poolConfig);
return connectionFactory;
}
@Bean
public RedisTemplate redisTemplate()
{
RedisTemplate template = new RedisTemplate();
template.setConnectionFactory(jedisConnectionFactory());
return template;
}
}
package com.dg;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
@SpringBootApplication
@EnableCaching
public class RedisTestApplication {
public static void main(String[] args) {
SpringApplication.run(RedisTestApplication.class, args);
}
}
I have a Spring Boot application with an application.properties that looks like so:
com.mycompany.schedule.test=welcome
com.mycompany.schedule.test2=hi there
I would like these to be deserialized into a Map.
I have tried the following, and other variants, but it doesn't seem to work:
private Map<String, String> schedules;
@Value("${com.mycompany.schedule}")
public void setSchedules(Map<String, String> values) {
this.schedules = values;
}
Option 1:
In your configuration class, read the application.properties file using the @PropertySource annotation as below.
MyAppConfiguration.java
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(name = "customPropertySource", value = "classpath:application.properties")
public class MyAppConfiguration {
}
Then in your POJO (or any other Spring component), you can use the Environment class to get the properties.
MyPojo.java
import java.util.Map;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.io.support.ResourcePropertySource;
import org.springframework.stereotype.Component;
@Component
public class MyPojo {
@Autowired
private ConfigurableEnvironment env;
private Map<String, Object> schedules;
@PostConstruct
public void properties() {
Object propSourceObj = env.getPropertySources().get("customPropertySource");
if (propSourceObj instanceof ResourcePropertySource) {
ResourcePropertySource propSource = (ResourcePropertySource) propSourceObj;
setSchedules(propSource.getSource());
}
System.out.println("Schedules: " + getSchedules());
}
public Map<String, Object> getSchedules() {
return schedules;
}
public void setSchedules(Map<String, Object> schedules) {
this.schedules = schedules;
}
}
Please note that with this option you are reading the application.properties file twice. If you are OK with that, you can choose option 1. Otherwise, choose option 2.
Option 2:
MyAppConfiguration.java
import org.springframework.context.annotation.Configuration;
@Configuration
public class MyAppConfiguration {
}
MyPojo.java
import java.util.Map;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.env.OriginTrackedMapPropertySource;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.stereotype.Component;
@Component
public class MyPojo {
@Autowired
private ConfigurableEnvironment env;
private Map<String, Object> schedules;
@PostConstruct
public void properties() {
Object propSourceObj = env.getPropertySources().get("applicationConfig: [classpath:/application.properties]");
if (propSourceObj instanceof OriginTrackedMapPropertySource) {
OriginTrackedMapPropertySource propSource = (OriginTrackedMapPropertySource) propSourceObj;
setSchedules(propSource.getSource());
}
System.out.println("Schedules: " + getSchedules());
}
public Map<String, Object> getSchedules() {
return schedules;
}
public void setSchedules(Map<String, Object> schedules) {
this.schedules = schedules;
}
}
Edit:
Sorry, earlier I misunderstood your question. If you know the property prefix, then you can use @ConfigurationProperties as shown below. The options above are for reading all properties without knowing the property prefix.
MyAppConfiguration.java
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
@Configuration
public class MyAppConfiguration {
@Autowired
private MyCompanyConfigurationProperties myCompanyConfProps;
@PostConstruct
public void testProperties() {
System.out.println("My Properties: " + myCompanyConfProps.getSchedule());
}
}
MyCompanyConfigurationProperties.java
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties(prefix = "com.mycompany")
public class MyCompanyConfigurationProperties {
private Map<String, String> schedule = new HashMap<String, String>();
public Map<String, String> getSchedule() {
return schedule;
}
public void setSchedule(Map<String, String> schedule) {
this.schedule = schedule;
}
}
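One detail worth checking, depending on your Spring Boot version: a @ConfigurationProperties class is not registered by itself. If the binding does not happen, enable it explicitly, for example by adding @EnableConfigurationProperties to the configuration class shown above:
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Configuration
@EnableConfigurationProperties(MyCompanyConfigurationProperties.class)
public class MyAppConfiguration {
    // ... same content as above
}
With the two properties from the question, getSchedule() should then contain {test=welcome, test2=hi there}.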
I am trying to test my caching mechanism. I am using Caffeine cache.
Test: I call the caching method twice and expect the same result for both invocations, i.e. when I call the method a second time with the same arguments, it should not execute the method again; it should get the data from the cache.
Problem: My code is actually invoking the method twice. I am mocking my repository. Please guide me if anyone has solved this kind of problem.
My repo:
public class TemplateRepositoryOracle implements TemplateRepository
@Cacheable("Templates")
@Override
public Optional<NotificationTemplate> getNotificationTemplate(String eventTypeId, String destinationType, String destinationSubType) {}
Test:
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.Ticker;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.caffeine.CaffeineCache;
import org.springframework.cache.support.SimpleCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
public class TemplateRepositoyOracleTest {
interface TemplateRepository {
@Cacheable("Templates")
Optional<Template> getNotificationTemplate(String eventTypeId, String destinationType, String destinationSubType);
}
@Configuration
@ConfigurationProperties(prefix = "caching")
@Data
@Slf4j
static class CacheConfiguration {
@Data
public static class CacheSpec {
private Integer expireAfterWrite;
}
private Map<String, CacheSpec> specs;
@Bean
public CacheManager cacheManager(Ticker ticker) {
SimpleCacheManager manager = new SimpleCacheManager();
if (specs != null) {
List<CaffeineCache> caches =
specs.entrySet().stream()
.map(entry -> buildCache(entry.getKey(),
entry.getValue(),
ticker))
.collect(Collectors.toList());
manager.setCaches(caches);
}
return manager;
}
private CaffeineCache buildCache(String name, CacheSpec cacheSpec, Ticker ticker) {
log.info("Cache {} specified timeout of {} min", name, cacheSpec.getExpireAfterWrite());
final Caffeine<Object, Object> caffeineBuilder
= Caffeine.newBuilder()
.expireAfterWrite(cacheSpec.getExpireAfterWrite(), TimeUnit.MINUTES)
.ticker(ticker);
return new CaffeineCache(name, caffeineBuilder.build());
}
@Bean
public Ticker ticker() {
return Ticker.systemTicker();
}
@Bean
TemplateRepository myRepo() {
return Mockito.mock(TemplateRepository.class);
}
}
@Autowired
CacheManager manager;
@Autowired
TemplateRepository repo;
@Test
public void methodInvocationShouldBeCached() {
Optional<Template> third = Optional.of(new NotificationTemplate(UUID.randomUUID(),"Test",DestinationType.SMS,"test","test",Optional.empty(),Optional.empty()));
Optional<Template> fourth = Optional.of(new NotificationTemplate(UUID.randomUUID(),"Test2",DestinationType.SMS,"test2","test2",Optional.empty(),Optional.empty()));
// the mock to return *different* objects for the first and second call
Mockito.when(repo.getNotificationTemplate(Mockito.any(String.class),Mockito.any(String.class),Mockito.any(String.class))).thenReturn(third);
// First invocation returns object returned by the method
Object result = repo.getNotificationTemplate("1","1","1");
assertThat(result, is(third));
// Second invocation should return cached value, *not* second (as set up above)
result = repo.getNotificationTemplate("1","1","1");
assertThat(result, is(third));
// Verify repository method was invoked once
Mockito.verify(repo, Mockito.times(1)).getNotificationTemplate("1","1","1");
assertThat(manager.getCache("notificationTemplates").get(""), is(notNullValue()));
// Third invocation with different key is triggers the second invocation of the repo method
result = repo.getNotificationTemplate("2","2","2");
assertThat(result, is(fourth));
}
}
Property file:
caching:
specs:
Templates:
expireAfterWrite: 1440
I have a simple PersonController class that provides a save() method to persist an object from an HTTP POST request.
package org.rw.controller;
import java.sql.Timestamp;
import java.util.List;
import org.rw.entity.Person;
import org.rw.service.PersonService;
import org.rw.spring.propertyeditor.TimestampPropertyEditor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@Controller
@RequestMapping(value="/person")
public class PersonController {
private static final Logger logger = LoggerFactory.getLogger(PersonController.class);
@Autowired
private PersonService personService;
@Autowired
TimestampPropertyEditor timestampPropertyEditor;
@InitBinder
public void initBinder(WebDataBinder binder) {
binder.registerCustomEditor(Timestamp.class, "dob", timestampPropertyEditor);
}
@RequestMapping(value="/save", method=RequestMethod.POST)
public String save(Model model, Person person) {
Long personId = personService.save(person);
return "redirect:view/" + personId;
}
}
As the save() method returns "redirect:view/" + personId;, the redirect will be different for every request. It may be "view/5" or "view/6", depending on the id of the object that has been persisted.
Then I have a simple class to test the above controller with Spring mocking.
package org.rw.controller;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
import org.rw.service.UserService;
import org.rw.test.SpringControllerTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
public class PersonControllerTest extends SpringControllerTest {
@Autowired
private UserService userService;
@Test
public void add() throws Exception {
mockMvc.perform(get("/person/add", new Object[0])).andExpect(status().isOk());
}
@Test
public void save() throws Exception {
UserDetails userDetails = userService.findByUsername("anil");
Authentication authToken = new UsernamePasswordAuthenticationToken (userDetails.getUsername(), userDetails.getPassword(), userDetails.getAuthorities());
SecurityContextHolder.getContext().setAuthentication(authToken);
mockMvc.perform(
post("/person/save", new Object[0])
.param("firstName", "JunitFN")
.param("lastName", "JunitLN")
.param("gender", "M")
.param("dob", "11/02/1989")
).andExpect(
redirectedUrl("view")
);
}
}
Now I have the problem that redirectedUrl("view") rejects the value "view/5". I have tried redirectedUrl("view*") and redirectedUrl("view/*"), but they do not work.
Edit:
Here is the workaround I have come up with:
MvcResult result = mockMvc.perform(
post("/person/save", new Object[0])
.param("firstName", "JunitFN")
.param("lastName", "JunitLN")
.param("gender", "MALE")
.param("dob", "11/02/1989")
).andExpect(
//redirectedUrl("view")
status().isMovedTemporarily()
).andReturn();
MockHttpServletResponse response = result.getResponse();
String location = response.getHeader("Location");
Pattern pattern = Pattern.compile("\\Aview/[0-9]+\\z");
assertTrue(pattern.matcher(location).find());
But I am still looking for the proper way.
Update:
I have posted the same issue on the Spring JIRA here:
Since Spring 4.0 you can use redirectedUrlPattern, as pointed out by Paulius Matulionis.
As of Spring 3.x this is not supported out of the box, but you can easily add your own custom result matcher:
private static ResultMatcher redirectedUrlPattern(final String expectedUrlPattern) {
return new ResultMatcher() {
public void match(MvcResult result) {
Pattern pattern = Pattern.compile("\\A" + expectedUrlPattern + "\\z");
assertTrue(pattern.matcher(result.getResponse().getRedirectedUrl()).find());
}
};
}
And use it like a built-in matcher:
mockMvc.perform(
post("/person/save", new Object[0])
.param("firstName", "JunitFN")
.param("lastName", "JunitLN")
.param("gender", "M")
.param("dob", "11/02/1989")
).andExpect(
redirectedUrlPattern("view/[0-9]+")
);
Since 4.0 it is available in Spring itself.
Please check here.
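For reference, a sketch of how the built-in matcher can be used on Spring 4.0+ (note that, unlike the custom regex matcher above, it expects an Ant-style pattern, so the numeric id is matched with a wildcard):
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrlPattern;

mockMvc.perform(
        post("/person/save", new Object[0])
                .param("firstName", "JunitFN")
                .param("lastName", "JunitLN")
                .param("gender", "M")
                .param("dob", "11/02/1989")
).andExpect(
        // Ant-style pattern: "*" matches the generated id, e.g. view/5
        redirectedUrlPattern("view/*")
);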