Spring AMQP Priority Message - java

I am trying to use message priorities on RabbitMQ queues. It works with the RabbitMQ-provided Java client, but it does not work with the spring-rabbit dependency. Please have a look.
RabbitMQ Server Version - 3.6.5
Erlang - OTP 19 (8.0)
Using RabbitMQ Java Client
Pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.springframework.samples</groupId>
<artifactId>RabbitMQ</artifactId>
<version>0.0.1-SNAPSHOT</version>
<developers>
<developer>
<name>Sagar Rout</name>
</developer>
</developers>
<properties>
<!-- Generic properties -->
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<!-- Spring -->
<spring-framework.version>4.3.2.RELEASE</spring-framework.version>
</properties>
<dependencies>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<!-- Spring AMQP -->
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
<version>1.6.1.RELEASE</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
Publisher.java
public class Publisher {
private final static String QUEUE_NAME = "S1_Priority";
public static void main(String[] argv) throws Exception {
ConnectionFactory factory = new ConnectionFactory();
factory.setHost("localhost");
Connection connection = factory.newConnection();
Channel channel = connection.createChannel();
Map<String, Object> args = new HashMap<String, Object>();
args.put("x-max-priority", 10);
channel.queueDeclare(QUEUE_NAME, false, false, false, args);
String message = "Hello World!";
for (int i = 0; i < 10; i++) {
channel.basicPublish("", QUEUE_NAME,
new AMQP.BasicProperties.Builder().contentType("text/plain").deliveryMode(2).priority(i).build(),
message.getBytes("UTF-8"));
System.out.println(" [x] Sent '" + message + "'" + "priority" + i);
}
channel.close();
connection.close();
}}
Consumer.java
public class Consumer {
private final static String QUEUE_NAME = "S1_Priority";
public static void main(String[] argv) throws Exception {
ConnectionFactory factory = new ConnectionFactory();
factory.setHost("localhost");
Connection connection = factory.newConnection();
Channel channel = connection.createChannel();
Map<String, Object> args = new HashMap<String, Object>();
args.put("x-max-priority", 10);
channel.queueDeclare(QUEUE_NAME, false, false, false, args);
System.out.println(" [*] Waiting for messages. To exit press CTRL+C");
DefaultConsumer consumer = new DefaultConsumer(channel) {
@Override
public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties,
byte[] body) throws IOException {
String message = new String(body, "UTF-8");
System.out.println(" [x] Received '" + message + "'" + properties.getPriority());
}
};
channel.basicConsume(QUEUE_NAME, true, consumer);
}}
This works, and the messages with higher priority are delivered first. But it does not work with spring-rabbit. Please find the code below.
RabbitMQConfig.java
@Configuration
@ComponentScan(basePackages = { "com.blackocean.*" })
@PropertySource("classpath:config.properties")
public class RabbitMQConfig {
#Value("${rabbitmq.host}")
private String host;
#Value("${rabbitmq.port}")
private Integer port;
#Value("${rabbitmq.username}")
private String username;
#Value("${rabbitmq.password}")
private String password;
#Value("${rabbitmq.connection.size}")
private Integer connectionSize ;
#Bean
public static PropertySourcesPlaceholderConfigurer propertyConfigInDev() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean
public ConnectionFactory connectionFactory() {
CachingConnectionFactory cachingConnectionFactory = new CachingConnectionFactory();
cachingConnectionFactory.setHost(host);
cachingConnectionFactory.setPort(port);
cachingConnectionFactory.setUsername(username);
cachingConnectionFactory.setPassword(password);
cachingConnectionFactory.setConnectionLimit(connectionSize);
return cachingConnectionFactory;
}
@Bean
public RabbitAdmin rabbitAdmin() {
return new RabbitAdmin(connectionFactory());
}
@Bean
public RabbitTemplate rabbitTemplate() {
return new RabbitTemplate(connectionFactory());
}
@Bean
public Queue queue() {
Map<String, Object> args = new HashMap<String, Object>();
args.put("x-priority", 10);
Queue queue = new Queue("myQueue", true, false, false, args);
return queue;
}}
Send1UsingJavaConfig.java
public class Send1UsingJavaConfig {
/**
* @param args
*/
public static void main(String[] args) {
ApplicationContext context = new AnnotationConfigApplicationContext(RabbitMQConfig.class);
RabbitTemplate rabbitTemplate = context.getBean(RabbitTemplate.class);
rabbitTemplate.convertAndSend("", "myQueue", "Hi Mr.Ocean 10", new MessagePostProcessor() {
@Override
public Message postProcessMessage(Message message) throws AmqpException {
message.getMessageProperties().setPriority(9);
return message;
}
});
}
}
RecvUsingJavaConfig.java
public class RecvUsingJavaConfig {
public static void main(String[] args) {
ApplicationContext context = new AnnotationConfigApplicationContext(RabbitMQConfig.class);
RabbitTemplate rabbitTemplate = context.getBean(RabbitTemplate.class);
// Basic Example
String message = (String) rabbitTemplate.receiveAndConvert("myQueue");
System.out.println(message);
}}
config.properties
#RabbitMQ
rabbitmq.host=localhost
#Always provide port and connection size in numbers
rabbitmq.port=5672
rabbitmq.username=guest
rabbitmq.password=guest
rabbitmq.connection.size=100
Now I am sending messages with different priorities, but they are always received in the order they were sent. Any suggestion would be great!

Just a guess here: I looked into an old AMQP project of mine that used a priority queue with an older version of RabbitMQ.
The priority there was set as
args.put("x-max-priority", 10); , which looks slightly different from your args.put("x-priority", 10);.
You could refer to the old priority-queue repo in the link and see if that helps.

The queue must have the argument 'x-max-priority'.
When publishing, messageProperties.priority must be non-zero.
When using Spring Boot AMQP, it is also important to set
spring.rabbitmq.listener.simple.prefetch=1
Otherwise Spring Boot prefetches 250 messages and priorities are effectively ignored.
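For a plain Spring AMQP setup like the configuration in the question (no Spring Boot properties), the same effect can be applied on the listener container factory. A minimal sketch, assuming a SimpleRabbitListenerContainerFactory bean is added next to the other beans in RabbitMQConfig:
@Bean
public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory) {
    SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory();
    factory.setConnectionFactory(connectionFactory);
    // keep the prefetch small so the broker can reorder by priority;
    // with a large prefetch the consumer already holds the low-priority messages
    factory.setPrefetchCount(1);
    return factory;
}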

If anyone has similar requirements on message priority: the priority argument must be defined (in the configuration class) before the queue is created. Applying this configuration to an already existing queue will not work (from my testing).
#Value("${myApp.rabbitmq.queue}")
private String queueName;
#Bean
Queue queue(){
Map<String, Object> args = new HashMap<String, Object>();
args.put("x-max-priority", 10);
Queue queue = new Queue(queueName, true, false, false, args) ;
return queue ;
}
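To illustrate that point, a minimal sketch (using the RabbitAdmin bean and the ApplicationContext from the question's configuration; "myQueue" is the queue name used there): the arguments of an existing queue cannot be changed in place, so the old queue has to be deleted and declared again, which also discards any messages it still holds.
Map<String, Object> args = new HashMap<String, Object>();
args.put("x-max-priority", 10);
RabbitAdmin admin = context.getBean(RabbitAdmin.class);
// one-off migration: drop the queue that was originally declared without x-max-priority
admin.deleteQueue("myQueue");
// declare it again; this time the x-max-priority argument is applied
admin.declareQueue(new Queue("myQueue", true, false, false, args));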
When you push messages onto the queue, make sure the priority does not exceed 10, since we defined the queue's maximum priority as 10.
BR, Santhosh

Related

Unit testing Spring Cloud Stream Producer-Processor-Consumer Scenario

I have created a sample app for a producer-processor-consumer scenario using Spring Cloud Stream, with the legacy annotation-based approach.
In the unit test I want to cover the simple scenario of producing a message and asserting on the consumed message after it undergoes transformation. But I am not receiving the message at the consumer binding end. Please let me know what could be missing here.
Producer.java
@EnableBinding(MyProcessor.class)
public class Producer {
@Bean
@InboundChannelAdapter(value = MyProcessor.OUTPUT, poller = @Poller(fixedDelay = "1000", maxMessagesPerPoll = "1"))
public MessageSource<String> produceMessage() {
return () -> new GenericMessage<>("Hello Spring Cloud World >>> " + Instant.now());
}
}
TransformProcessor.java
@EnableBinding(MyProcessor.class)
public class TransformProcessor {
@Transformer(inputChannel = MyProcessor.OUTPUT, outputChannel = MyProcessor.INPUT)
public String transform(String message) {
System.out.println("Transforming the message: " + message);
return message.toUpperCase();
}
}
Consumer.java
@EnableBinding(MyProcessor.class)
public class Consumer {
@StreamListener(MyProcessor.INPUT)
public void consume(String message) {
System.out.println("Consuming transformed message: " + message);
}
}
MyProcessor.java
public interface MyProcessor {
String INPUT = "my-input";
final static String OUTPUT = "my-output";
@Input(INPUT)
SubscribableChannel anInput();
@Output(OUTPUT)
MessageChannel anOutput();
}
SpringCloudStreamLegacyApplicationTests.java
@SpringBootTest
class SpringCloudStreamLegacyApplicationTests {
@Autowired
private MyProcessor myProcessor;
@Autowired
private MessageCollector messageCollector;
@Test
public void testConsumer() {
myProcessor.anOutput().send(new GenericMessage<byte[]>("hello".getBytes()));
Message<?> poll = messageCollector.forChannel(myProcessor.anInput()).poll();
System.out.println("Received: " + poll.getPayload());
}
}
Here, I am expecting a message to be received in the handleMessage method.
Note: I am using the following dependency for the tests:
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-test-support</artifactId>
<scope>test</scope>
</dependency>

RabbitMQ RepublishMessageRecoverer: ignore ImmediateAcknowledgeAmqpException

I'm creating a RabbitMQ consumer using...
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
<version>2.2.1.RELEASE</version>
</dependency>
My Listener looks like this...
@RabbitListener(queues = "queue")
public void getMessages(String message) {
if(message.contains("error")) {
throw new ImmediateAcknowledgeAmqpException("Invalid message, discarding");
}
throw new AmqpRejectAndDontRequeueException("Re-queuing message");
}
I'm expecting ImmediateAcknowledgeAmqpException to discard the message entirely, but it keeps ending up on the dead-letter queue. Here's my configuration:
@Bean
public RepublishMessageRecoverer republishMessageRecoverer(RabbitTemplate rabbitTemplate) {
RepublishMessageRecoverer republishMessageRecoverer = new RepublishMessageRecoverer(rabbitTemplate,"exchange", "deadletter_routing_key");
return republishMessageRecoverer;
}
@Bean
public SimpleRetryPolicy simpleRetryPolicy() {
Map<Class<? extends Throwable>, Boolean> includeExceptions = new HashMap<>();
includeExceptions.put(ImmediateAcknowledgeAmqpException.class, false);
includeExceptions.put(AmqpRejectAndDontRequeueException.class, true);
return new SimpleRetryPolicy(5, includeExceptions, true);
}
@Bean
public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory,
SimpleRetryPolicy simpleRetryPolicy, RepublishMessageRecoverer republishMessageRecoverer) {
SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory();
factory.setConnectionFactory(connectionFactory);
factory.setDefaultRequeueRejected(false);
factory.setAdviceChain(RetryInterceptorBuilder
.stateless()
.retryPolicy(simpleRetryPolicy)
.recoverer(republishMessageRecoverer)
.backOffOptions(5000, 1,
1000)
.build());
return factory;
}
Is there a way I can exclude the ImmediateAcknowledgeAmqpException from the recoverer?
Thanks
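One possible direction (a sketch only, not verified against this setup): the retryable-exception map only controls whether retries happen; whatever exception finally fails still reaches the recoverer, so the exclusion can be done in a wrapper around it. The bean name selectiveRecoverer and the cause-chain walk below are my own illustration, not an API from the question:
@Bean
public MessageRecoverer selectiveRecoverer(RabbitTemplate rabbitTemplate) {
    RepublishMessageRecoverer republish =
            new RepublishMessageRecoverer(rabbitTemplate, "exchange", "deadletter_routing_key");
    return (message, cause) -> {
        // walk the cause chain; the listener exception is usually wrapped by the container
        for (Throwable t = cause; t != null; t = (t.getCause() == t ? null : t.getCause())) {
            if (t instanceof ImmediateAcknowledgeAmqpException) {
                return; // drop the message silently instead of republishing it
            }
        }
        republish.recover(message, cause);
    };
}
This wrapper would then be passed to .recoverer(...) in the advice chain in place of the plain RepublishMessageRecoverer.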

Spring boot JMS Listener - org.springframework.jms.config.JmsListenerEndpointRegistry.isAutoStartup()Z

I have set up a JmsListener to read messages from an SQS queue.
First, I created a configuration class like the one below to set up a ConnectionFactory and a JmsTemplate.
@Configuration
@EnableJms
public class SQSJmsConfig {
private AWSCredentialsProvider awsCredentialsProvider;
#Value("${aws.ses.accessKey}")
private String accessKey;
#Value("${aws.ses.secretKey}")
private String secretKey;
#Bean
public DefaultJmsListenerContainerFactory jmsListenerContainerFactory() {
AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
awsCredentialsProvider = new AWSStaticCredentialsProvider(awsCredentials);
SQSConnectionFactory sqsConnectionFactory = new SQSConnectionFactory(new ProviderConfiguration(),
AmazonSQSClientBuilder.standard().withRegion(Regions.US_EAST_1).withCredentials(awsCredentialsProvider));
DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
factory.setConnectionFactory(sqsConnectionFactory);
factory.setDestinationResolver(new DynamicDestinationResolver());
factory.setConcurrency("3-10");
factory.setSessionAcknowledgeMode(Session.CLIENT_ACKNOWLEDGE);
return factory;
}
@Bean
public JmsTemplate defaultJmsTemplate() {
AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
awsCredentialsProvider = new AWSStaticCredentialsProvider(awsCredentials);
SQSConnectionFactory sqsConnectionFactory = new SQSConnectionFactory(new ProviderConfiguration(),
AmazonSQSClientBuilder.standard().withRegion(Regions.US_EAST_1).withCredentials(awsCredentialsProvider));
return new JmsTemplate(sqsConnectionFactory);
}
}
Then I created a JmsListener method to read messages from the SQS queue:
@Service
public class SQSQueueService {
private static final Logger log = LoggerFactory.getLogger(SQSQueueService.class);
@JmsListener(destination = "${aws.sqs.queueName}")
public void readPlan(SQSEmailStatusQueue sqsEmailStatusQueue) {
try {
log.info("SNS notification received from SQS queue");
String message = sqsEmailStatusQueue.getMessage();
System.out.println(message);
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
After that, when I run the project, I get the following exception:
java.lang.AbstractMethodError: org.springframework.jms.config.JmsListenerEndpointRegistry.isAutoStartup()Z
What is the reason for this exception?
The problem was with the dependency.
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jms</artifactId>
<version>5.1.6.RELEASE</version>
</dependency>
I removed the version from this dependency, and then it worked.
Can anybody please explain why this happened?
The failure can occur if your application uses spring-boot-starter-parent and additionally depends on:
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jms</artifactId>
<version>${spring.jms.version}</version>
</dependency>
The AbstractMethodError is a symptom of mixing binary-incompatible Spring jars: the explicitly pinned spring-jms version does not match the Spring Framework version managed by the Boot parent. Use the dependency provided by the parent instead:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-activemq</artifactId>
</dependency>

How to consume HornetQ messages in a Spring Boot application

I am trying to integrate a HornetQ consumer into a Spring Boot application. I have seen different examples, but all of them point to an ActiveMQ implementation, which confuses me a bit. I have written a standard HornetQ consumer in plain Java. Here is the code:
public class HornetQClient {
private String JMS_QUEUE_NAME;
private String MESSAGE_PROPERTY_NAME;
private ClientSessionFactory sf = null;
private static final Logger LOGGER = LoggerFactory.getLogger(TCPClient.class);
public HornetQClient(String hostName, String hostPort, String queueName, String propertyName) {
try {
Map<String, Object> map = new HashMap<String, Object>();
map.put("host", hostName);
map.put("port", hostPort);
this.JMS_QUEUE_NAME = queueName;
this.MESSAGE_PROPERTY_NAME = propertyName;
ServerLocator serverLocator = HornetQClient.createServerLocatorWithoutHA(new TransportConfiguration(NettyConnectorFactory.class.getName(), map));
sf = serverLocator.createSessionFactory();
startReadMessages();
} catch (Exception e) {
e.printStackTrace();
}
}
private void startReadMessages() {
ClientSession session = null;
try {
if (sf != null) {
session = sf.createSession(true, true);
while (true) {
ClientConsumer messageConsumer = session.createConsumer(JMS_QUEUE_NAME);
session.start();
ClientMessage messageReceived = messageConsumer.receive(1000);
if (messageReceived != null && messageReceived.getStringProperty(MESSAGE_PROPERTY_NAME) != null) {
System.out.println("Received JMS TextMessage:" + messageReceived.getStringProperty(MESSAGE_PROPERTY_NAME));
messageReceived.acknowledge();
} else
System.out.println("no message available");
messageConsumer.close();
Thread.sleep(500);
}
}
} catch (Exception e) {
LOGGER.error("Error while adding message by producer.", e);
} finally {
try {
session.close();
} catch (HornetQException e) {
LOGGER.error("Error while closing producer session,", e);
}
}
}
This works fine, but is there a standard way to write a message consumer in a Spring Boot application, or should I directly create a bean of this client and use it in the Spring Boot application?
--------------- hornetq-jms.xml---------
<?xml version="1.0"?>
<configuration xsi:schemaLocation="urn:hornetq /schema/hornetq-jms.xsd"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="urn:hornetq">
<!--the connection factory used by the example -->
<connection-factory name="ConnectionFactory">
<connectors>
<connector-ref connector-name="netty-connector" />
</connectors>
<entries>
<entry name="ConnectionFactory" />
</entries>
<consumer-window-size>0</consumer-window-size>
<connection-ttl>-1</connection-ttl>
</connection-factory>
<queue name="trackerRec">
<entry name="trackerRec" />
</queue>
</configuration>
You could perhaps use Spring JMS and JmsTemplate for this. The default setup for Spring Boot uses an ActiveMQ connection factory, but if you swap it for a HornetQ connection factory, you should be good to go:
@Configuration
@EnableJms
public class JmsConfig {
@Bean
public ConnectionFactory connectionFactory() {
final Map<String, Object> properties = new HashMap<>();
properties.put("host", "127.0.0.1");
properties.put("port", "5445");
final org.hornetq.api.core.TransportConfiguration configuration =
new org.hornetq.api.core.TransportConfiguration("org.hornetq.core.remoting.impl.netty.NettyConnectorFactory", properties);
return new org.hornetq.jms.client.HornetQJMSConnectionFactory(false, configuration);
}
@Bean
public JmsListenerContainerFactory<?> myFactory(ConnectionFactory connectionFactory,
DefaultJmsListenerContainerFactoryConfigurer configurer) {
DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
configurer.configure(factory, connectionFactory);
return factory;
}
}
I added the fully qualified class names for clarity.
Then, in some bean, you can just do this to consume a message:
@JmsListener(destination = "some.queue", containerFactory = "myFactory")
public void receiveMessage(@Header("some.header") final String something) {
System.out.println("Received <" + something + ">");
}
Disclaimer: I have not actually tried this in practice; it is based on my experience with Spring and ActiveMQ, as well as these sources: https://dzone.com/articles/connecting-spring https://spring.io/guides/gs/messaging-jms/
You might have to do some digging to get this to work the way you want, but I think this approach is a bit more "high-level" than the one you are going for.

Spring Integration - How to implement an asynchronous TCP Socket requests/responses over the same connection?

I have a Python TCP socket server which:
Allows only one client connection at a time;
Has an input stream and output stream that operate independently.
On the other side, I have a Java Spring Boot client application using Spring Integration. My current TCP socket configuration looks like this:
@MessagingGateway(defaultRequestChannel = REQUEST_CHANNEL, errorChannel = ERROR_CHANNEL)
public interface ClientGtw {
Future<Response> send(Request request);
}
@Bean
@ServiceActivator(inputChannel = REQUEST_CHANNEL)
public MessageHandler outboundGateway(TcpNioClientConnectionFactory connectionFactory) {
TcpOutboundGateway gateway = new TcpOutboundGateway();
gateway.setConnectionFactory(connectionFactory);
gateway.setRequestTimeout(TimeUnit.SECONDS.toMillis(timeout));
gateway.setRemoteTimeout(TimeUnit.SECONDS.toMillis(timeout));
return gateway;
}
@Bean
public TcpNioClientConnectionFactory clientConnectionFactory(AppConfig config) {
Host host = getHost(config);
TcpNioClientConnectionFactory factory = new TcpNioClientConnectionFactory(host.name, host.port);
factory.setSingleUse(false);
factory.setSoTimeout((int) TimeUnit.SECONDS.toMillis(timeout));
SerializerDeserializer sd = new SerializerDeserializer();
factory.setDeserializer(sd);
factory.setSerializer(sd);
return factory;
}
This approach works fine; however, once a request is sent it blocks the connection until a response is received. This is a problem because some requests take a long time to get a response while other incoming requests could be answered faster. I would like to send and receive requests and responses independently (decoupled from each other). The transported object (serialized and deserialized) contains a key pair that allows correct correlation.
TL;DR: How can I implement asynchronous requests/responses over the same connection?
The Spring TcpOutboundGateway javadoc mentions: Use a pair of outbound/inbound adapters for that use case.
So, in addition to the declaration above:
1st Attempt
@Bean
public TcpInboundGateway inboundGateway(AbstractServerConnectionFactory connectionFactory) {
TcpInboundGateway gateway = new TcpInboundGateway();
gateway.setConnectionFactory(connectionFactory);
gateway.setRequestTimeout(TimeUnit.SECONDS.toMillis(timeout));
return gateway;
}
@Bean
public AbstractServerConnectionFactory serverFactory(AppConfig config) {
Host host = getHost(config);
AbstractServerConnectionFactory connectionFactory = new TcpNetServerConnectionFactory(host.port);
connectionFactory.setSingleUse(true);
connectionFactory.setSoTimeout(timeout);
return connectionFactory;
}
The requests are blocked until a response is delivered as before.
2nd Attempt
@Bean
public TcpInboundGateway inboundGateway(TcpNioClientConnectionFactory connectionFactory) {
TcpInboundGateway gateway = new TcpInboundGateway();
gateway.setConnectionFactory(connectionFactory);
gateway.setRequestTimeout(TimeUnit.SECONDS.toMillis(timeout));
gateway.setClientMode(true);
return gateway;
}
org.springframework.integration.ip.tcp.connection.TcpNioClientConnectionFactory may only be used by one inbound adapter
Any clue?
Use a pair of channel adapters instead of an outbound gateway. Instead of using a MessagingGateway, you can do the correlation yourself in your application, or you can use the same technique as is used in the tcp-client-server-multiplex sample app. It uses an aggregator to aggregate a copy of the outbound message with an inbound message, replying to the gateway.
It's old, and uses XML configuration, but the same techniques apply.
<publish-subscribe-channel id="input" />
<ip:tcp-outbound-channel-adapter id="outAdapter.client"
order="2"
channel="input"
connection-factory="client" /> <!-- Collaborator -->
<!-- Also send a copy to the custom aggregator for correlation and
so this message's replyChannel will be transferred to the
aggregated message.
The order ensures this gets to the aggregator first -->
<bridge input-channel="input" output-channel="toAggregator.client"
order="1"/>
<!-- Asynch receive reply -->
<ip:tcp-inbound-channel-adapter id="inAdapter.client"
channel="toAggregator.client"
connection-factory="client" /> <!-- Collaborator -->
<!-- dataType attribute invokes the conversion service, if necessary -->
<channel id="toAggregator.client" datatype="java.lang.String" />
<aggregator input-channel="toAggregator.client"
output-channel="toTransformer.client"
expire-groups-upon-completion="true"
expire-groups-upon-timeout="true"
discard-channel="noResponseChannel"
group-timeout="1000"
correlation-strategy-expression="payload.substring(0,3)"
release-strategy-expression="size() == 2" />
<channel id="noResponseChannel" />
<service-activator input-channel="noResponseChannel" ref="echoService" method="noResponse" />
<transformer input-channel="toTransformer.client"
expression="payload.get(1)"/> <!-- The response is always second -->
(This simple sample correlates on the first 3 bytes).
Gary, thanks for your guidance.
To solve this issue it is important to first understand the Messaging Channel types.
So, in the configurer class:
@Bean(name = REQUEST_CHANNEL)
public DirectChannel sender() {
return new DirectChannel();
}
@Bean(name = RESPONSE_CHANNEL)
public PollableChannel receiver() {
return new QueueChannel();
}
@Bean
@ServiceActivator(inputChannel = REQUEST_CHANNEL)
public TcpSendingMessageHandler outboundClient(TcpNioClientConnectionFactory connectionFactory) {
TcpSendingMessageHandler outbound = new TcpSendingMessageHandler();
outbound.setConnectionFactory(connectionFactory);
outbound.setRetryInterval(TimeUnit.SECONDS.toMillis(timeout));
outbound.setClientMode(true);
return outbound;
}
@Bean
public TcpReceivingChannelAdapter inboundClient(TcpNioClientConnectionFactory connectionFactory) {
TcpReceivingChannelAdapter inbound = new TcpReceivingChannelAdapter();
inbound.setConnectionFactory(connectionFactory);
inbound.setRetryInterval(TimeUnit.SECONDS.toMillis(timeout));
inbound.setOutputChannel(receiver());
inbound.setClientMode(true);
return inbound;
}
This scratch @Singleton class illustrates how to handle the requests and responses (assuming that requests and responses contain a UID to correlate them):
@Autowired
private DirectChannel sender;
@Autowired
private PollableChannel receiver;
private BlockingQueue<Request> requestPool = new LinkedBlockingQueue<>();
private Map<String, Response> responsePool = Collections.synchronizedMap(new HashMap<>());
@PostConstruct
private void init() {
new Receiver().start();
new Sender().start();
}
/*
* It can be called as many times as necessary without blocking for a response
*/
public void send(Request req) {
requestPool.add(req);
}
/*
* Check for a response until the socket timeout
*/
public Response receive(String key) {
Response res = responsePool.get(key);
if (res != null) {
responsePool.remove(key);
}
return res;
}
private class Receiver extends Thread {
@Override
public void run() {
while (true) {
try {
tcpReceive();
Thread.sleep(250);
} catch (InterruptedException e) { }
}
}
private void tcpReceive() {
Message<Response> msg = (Message<Response>) receiver.receive();
if (msg != null) {
Response res = msg.getPayload();
responsePool.put(res.getUID(), res);
}
}
}
private class Sender extends Thread {
@Override
public void run() {
while (true) {
try {
tcpSend();
Thread.sleep(250);
} catch (InterruptedException e) { }
}
}
private void tcpSend() throws InterruptedException {
Request req = requestPool.poll(125, TimeUnit.MILLISECONDS);
if (req != null) {
sender.send(MessageBuilder.withPayload(req).build());
}
}
}
UPDATED
I forgot to mention this:
@Bean
public TcpNioClientConnectionFactory clientConnectionFactory(Config config) {
// Get host properties
Host host = getHost(config);
// Create socket factory
TcpNioClientConnectionFactory factory = new TcpNioClientConnectionFactory(host.name, host.port);
factory.setSingleUse(false); // IMPORTANT FOR SINGLE CHANNEL
factory.setSoTimeout((int) TimeUnit.SECONDS.toMillis(timeout));
return factory;
}
Feel free to make any considerations.
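A hedged usage sketch of the scratch class above: RequestResponseService is a hypothetical name for that @Singleton, and the Request constructor and the UID value are assumptions made only for illustration.
public Response sendAndWait(RequestResponseService service) throws InterruptedException {
    // hypothetical request carrying its correlation UID
    Request req = new Request("some-uid", "payload");
    service.send(req); // enqueue without blocking
    long deadline = System.currentTimeMillis() + 5000; // arbitrary wait budget
    Response res = null;
    while (res == null && System.currentTimeMillis() < deadline) {
        res = service.receive("some-uid"); // null until the Receiver thread has stored it
        Thread.sleep(100);
    }
    return res;
}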
