Spring integration multiple clients connecting to server port - java

From my application, I need to configure multiple client connections that need to connect to a single server. To do this, I create a variable number of beans with the ApplicationContext's BeanFactory, based on how many clients I have configured. Here is the code for 2 clients:
//setup beans
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.scan("pkg");
ConnectionFactory factory = new ConnectionFactory();
int clients = 2; //TODO read this value from file
ConfigurableListableBeanFactory beanFactory = ctx.getBeanFactory();
for (int count = 1; count <= clients; count++) {
    TcpNetClientConnectionFactory connectionFactory = factory.createClientConnectionFactory("127.0.0.1", 6680);
    //connection factory
    beanFactory.registerSingleton("connectionFactory_" + String.valueOf(count), connectionFactory);
    //inbound gateway
    MessageChannel input = new DirectChannel();
    MessageChannel output = new DirectChannel();
    TcpInboundGateway gateway = factory.createInboundGateway(connectionFactory, beanFactory, input, output, 10000, 20000);
    beanFactory.registerSingleton("gateway_" + String.valueOf(count), gateway);
    //message transformation and handling
    IntegrationFlow flow = factory.createFlow(input);
    beanFactory.registerSingleton("flow_" + String.valueOf(count), flow);
}
ctx.refresh();
//open connections
for (int count = 1; count <= clients; count++) {
    TcpInboundGateway gateway = ctx.getBean("gateway_" + count, TcpInboundGateway.class);
    //necessary for the client to connect
    gateway.retryConnection();
}
Here are my factory methods:
@EnableIntegration
@IntegrationComponentScan
@Configuration
public class ConnectionFactory {

    public TcpNetClientConnectionFactory createClientConnectionFactory(String ip, int port) {
        TcpNetClientConnectionFactory factory = new TcpNetClientConnectionFactory(ip, port);
        factory.setSingleUse(false);
        factory.setSoTimeout(10000);
        factory.setSerializer(new ByteArrayLfSerializer());
        factory.setDeserializer(new ByteArrayLfSerializer());
        return factory;
    }

    public TcpInboundGateway createInboundGateway(
            AbstractConnectionFactory factory,
            BeanFactory beanFactory,
            MessageChannel input,
            MessageChannel output,
            int replyTimeout,
            int retryInterval) {
        TcpInboundGateway gateway = new TcpInboundGateway();
        gateway.setRequestChannel(input);
        gateway.setReplyChannel(output);
        gateway.setConnectionFactory(factory);
        gateway.setClientMode(true);
        gateway.setReplyTimeout(replyTimeout);
        gateway.setRetryInterval(retryInterval);
        ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
        scheduler.initialize();
        gateway.setTaskScheduler(scheduler);
        gateway.setBeanFactory(beanFactory);
        return gateway;
    }

    public IntegrationFlow createFlow(MessageChannel input) {
        IntegrationFlowBuilder builder = IntegrationFlows.from(input);
        builder.transform(Transformers.objectToString()).handle(System.out::println);
        return builder.get();
    }
}
When I run my program, both clients connect to my server. However, as soon as the server sends its first payload to each client I get the following exception (one for each client):
Exception sending message: GenericMessage [payload=byte[5], headers={ip_tcp_remotePort=6680, ip_connectionId=localhost:6680:33372:e26b9973-a32e-4c28-b808-1f2556576d01, ip_localInetAddress=/127.0.0.1, ip_address=127.0.0.1, id=4443ca34-fb53-a753-7603-53f6d7d82e11, ip_hostname=localhost, timestamp=1464098102462}]
org.springframework.messaging.MessageDeliveryException: Dispatcher has no subscribers for channel 'unknown.channel.name'.; nested exception is org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:81) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:442) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:115) ~[spring-messaging-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.messaging.core.GenericMessagingTemplate.doSendAndReceive(GenericMessagingTemplate.java:150) ~[spring-messaging-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.messaging.core.GenericMessagingTemplate.doSendAndReceive(GenericMessagingTemplate.java:45) ~[spring-messaging-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.messaging.core.AbstractMessagingTemplate.sendAndReceive(AbstractMessagingTemplate.java:42) ~[spring-messaging-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.integration.core.MessagingTemplate.sendAndReceive(MessagingTemplate.java:97) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.gateway.MessagingGatewaySupport.doSendAndReceive(MessagingGatewaySupport.java:422) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.gateway.MessagingGatewaySupport.sendAndReceiveMessage(MessagingGatewaySupport.java:390) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.ip.tcp.TcpInboundGateway.doOnMessage(TcpInboundGateway.java:119) ~[spring-integration-ip-4.2.5.RELEASE.jar:na]
at org.springframework.integration.ip.tcp.TcpInboundGateway.onMessage(TcpInboundGateway.java:97) ~[spring-integration-ip-4.2.5.RELEASE.jar:na]
at org.springframework.integration.ip.tcp.connection.TcpNetConnection.run(TcpNetConnection.java:182) ~[spring-integration-ip-4.2.5.RELEASE.jar:na]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_31]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_31]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_31]
Caused by: org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:153) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:120) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:77) ~[spring-integration-core-4.2.5.RELEASE.jar:na]
... 14 common frames omitted
The idea was that the data would be read and sent through the channels I configured for my InboundGateway to the transformer, which would then transform the data to a String and print it out.
Why does the framework not know which channel to send the data to? As far as I can see, I did create a unique channel for each client in the inbound gateway factory method. Can someone please have a look at my configuration and let me know what I missed, as I am absolutely stumped by this one.

Nothing is going to consume the message from your gateway.setReplyChannel(output).
At least we don't see anything like:
after which it will be printed out.
In most cases you get Dispatcher has no subscribers when one of your SubscribableChannels has no subscribers: either not configured or stopped.
EDIT
Forget my previous statement; it applies to the outbound case.
Your TcpInboundGateway is good, although you don't need setReplyChannel(): you can always rely on the default built-in TemporaryReplyChannel to wait for a result from the downstream flow.
Your IntegrationFlow also looks good. And it is correct that the .transform() doesn't send anything to any other channel; it just relies on the TemporaryReplyChannel in the headers.
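For example (a hypothetical variant of the question's createFlow(), not from the original post), a flow that should answer the server can simply end with a transformer; with no output channel configured, its result is routed back to the gateway via the replyChannel header:
public IntegrationFlow createReplyingFlow(MessageChannel input) {
    return IntegrationFlows.from(input)
            .transform(Transformers.objectToString())
            .<String, String>transform(String::toUpperCase)   // reply goes to the TemporaryReplyChannel header
            .get();
}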
I think your problem is that you don't specify @EnableIntegration on any of your @Configuration classes: http://docs.spring.io/spring-integration/reference/html/overview.html#_configuration
EDIT 2
See the GH issue on the matter.
So, what you need in addition to your code is:
Call beanFactory.initializeBean() for each of your manual registerSingleton() calls, because, as the JavaDocs of the latter say:
* <p>The given instance is supposed to be fully initialized; the registry
* will not perform any initialization callbacks (in particular, it won't
* call InitializingBean's {@code afterPropertiesSet} method).
Do that after ctx.refresh(), so that all the necessary BeanPostProcessors are already registered, including the one for Spring Integration Java DSL parsing.
Invoke ctx.start() to start all the Lifecycle beans, because the ones added manually after refresh haven't been visible to the regular ctx.refresh() process.
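Put together (a minimal sketch reusing the names from the question), the registration sequence would look roughly like this:
ctx.scan("pkg");
ctx.refresh();                                      // refresh first, so all BeanPostProcessors are registered
ConfigurableListableBeanFactory beanFactory = ctx.getBeanFactory();

IntegrationFlow flow = factory.createFlow(input);   // as in the question
beanFactory.registerSingleton("flow_1", flow);      // registers the instance, but runs no callbacks...
beanFactory.initializeBean(flow, "flow_1");         // ...so invoke the initialization callbacks explicitly

ctx.start();                                        // start the Lifecycle beans added after refresh()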

Here is the working simplified solution:
Beans.java
package beanconfig;

import org.springframework.context.annotation.Configuration;
import org.springframework.integration.config.EnableIntegration;

@Configuration
@EnableIntegration
public class Beans {
    //Beans can be configured here
}
IntegrationTest.java
import org.junit.Test;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.ip.tcp.TcpInboundGateway;
import org.springframework.integration.ip.tcp.connection.TcpNetClientConnectionFactory;
import org.springframework.integration.ip.tcp.serializer.ByteArrayLfSerializer;
import org.springframework.integration.transformer.ObjectToStringTransformer;
import org.springframework.messaging.MessageChannel;

public class IntegrationTest {

    private String generateComponentName(String baseName, int instanceCount) {
        return baseName + "_" + instanceCount;
    }

    @Test
    public void integrationTest1() throws Exception {
        try (AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext()) {
            ctx.scan("beanconfig");
            ctx.refresh();
            ConfigurableListableBeanFactory beanFactory = ctx.getBeanFactory();
            int numberOfClients = 2; //TODO configure from file
            for (int count = 0; count < numberOfClients; count++) {
                //connection factory
                TcpNetClientConnectionFactory connectionFactory = new TcpNetClientConnectionFactory("127.0.0.1", 6680);
                connectionFactory.setSingleUse(false);
                connectionFactory.setSoTimeout(10000);
                connectionFactory.setSerializer(new ByteArrayLfSerializer());
                connectionFactory.setDeserializer(new ByteArrayLfSerializer());
                //inbound gateway
                TcpInboundGateway inboundGateway = new TcpInboundGateway();
                inboundGateway.setRequestChannel(new DirectChannel());
                inboundGateway.setConnectionFactory(connectionFactory);
                inboundGateway.setClientMode(true);
                inboundGateway.setReplyTimeout(10000);
                inboundGateway.setRetryInterval(20000);
                //message transformation and flow
                String flowName = generateComponentName("flow", count);
                IntegrationFlow flow = IntegrationFlows.from(inboundGateway)
                        .transform(new ObjectToStringTransformer())
                        .handle(h -> System.out.println("Message received: " + h.getPayload()))
                        .get();
                beanFactory.registerSingleton(flowName, flow);
                beanFactory.initializeBean(flow, flowName);
            }
            ctx.start();
            //TODO do proper validation here
            Thread.sleep(10000);
        }
    }
}
Basically there were a couple of things wrong with my initial attempt. Here is what I changed to make it work:
1) When creating the AnnotationConfigApplicationContext, it must be created with a configuration class as parameter that is marked with the @EnableIntegration annotation. If not, then the context must scan a component that carries this annotation. I did do this in my first attempt, but I called refresh too late; it should be called directly after ctx.scan. Because my ctx.refresh() came after my bean factory registrations, @EnableIntegration was effectively not applied when the integration beans were created. Moving ctx.refresh() directly below ctx.scan() solves the problem.
2) Each bean registered into the context must also be initialized via the bean factory. This ensures that the BeanPostProcessors are run (it is not done automatically by registerSingleton).
3) ctx.start() then needs to be called to enable the beans that were created after ctx.refresh().

Related

connection issues when running Kafka test container with Spring Boot App

I am facing connection issues when running a Kafka test container (confluentinc/cp-kafka:5.4.3) with a Spring Boot App. Wondering if someone has faced this issue as well. After the Kafka container starts, the Admin Client tries to connect to the broker to fetch the metadata but fails to connect.
Error log:
[AdminClient clientId=adminclient-2] Connection to node -1 (localhost/127.0.0.1:9092) could not be established. Broker may not be available.
I tried the following workarounds to ensure the KafkaAdminClient uses the right address, but none of them worked:
Used the bootstrap server address
Used KAFKA_ADVERTISED_LISTENERS=BROKER://172.17.0.3:9092. This address was being set by testcontainers_start.sh within the docker container
Used kafka.getContainerName() to form the address. Example: BROKER://t-adsad:9092
Used kafka.getHost() + ":" + kafka.getMappedPort(9092)
Test class:
@RunWith(SpringRunner.class)
@Import(KafkaTestContainersConfiguration.class)
@SpringBootTest
@DirtiesContext
public class KafkaTestContainersLiveTest {

    @ClassRule
    public static KafkaContainer kafka =
            new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"));

    @BeforeClass
    public static void setupBootstrapServer() {
        String server = "BROKER://" + kafka.getNetworkAliases().get(0) + ":9092";
        System.setProperty("kafka.bootstrap.servers", server);
    }
Configuration class:
@Configuration
@EnableKafka
public class KafkaTestContainersConfiguration {

    @Value("${kafka.bootstrap.servers}")
    private String bootstrapServer;

    @Value("${kafka.topic}")
    private String topic;

    public final int NUM_PARTITIONS = 1;
    public final short REPLICATION_FACTOR = 1;

    @Bean
    public AdminClient adminClient() {
        return KafkaAdminClient.create(adminClientConfigs());
    }

    public Map<String, Object> adminClientConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
        return props;
    }
}
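For reference, the pattern usually suggested with Testcontainers (not verified against this exact setup) is to take the bootstrap address from the started container rather than building it by hand, e.g. in the @BeforeClass method:
@BeforeClass
public static void setupBootstrapServer() {
    // getBootstrapServers() returns the host-mapped address of the running container,
    // e.g. PLAINTEXT://localhost:32768
    System.setProperty("kafka.bootstrap.servers", kafka.getBootstrapServers());
}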

Dependency injection into websocket endpoint on embedded jetty

I have the following websocket endpoint:
import javax.inject.Inject;
import javax.websocket.*;
import javax.websocket.server.ServerEndpoint;

@ServerEndpoint(value = "/blabla")
public class WebsocketService {

    @Inject
    private DatabaseProvider dbProvider;

    @OnOpen
    public void onOpen(Session session) throws IOException {
        //do something
    }

    @OnMessage
    public void onMessage(Session session, String socketPacket) throws IOException {
        //do something else
    }
    ...
}
The code to start the embedded server:
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.websocket.jsr356.server.deploy.WebSocketServerContainerInitializer;
import javax.websocket.server.ServerContainer;
//other imports

public static void main(String[] args) {
    Server server = null;
    try {
        server = new Server(3081);
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");
        ServerContainer serverContainer = WebSocketServerContainerInitializer.configureContext(context);
        serverContainer.addEndpoint(WebsocketService.class);
        server.setHandler(context);
        server.start();
        server.join();
    } catch (Exception e) {
        logger.error(e.getMessage());
    } finally {
        if (server != null) {
            server.destroy();
        }
    }
}
The code above works perfectly for the case without dependency injection. However, I want to inject the dbProvider into my WebsocketService and use it in the onMessage method.
QUESTION 1: How to do the injection for the websocket server?
P.S. There are multiple examples of how dependency injection is done for REST endpoints using ResourceConfig + AbstractBinder + ServletContainer, but I am not sure how it can be applied for the case with the websocket server.
QUESTION 2: How to add a simple resource endpoint to the same server (to serve javascript)?
Quite a few moving parts in this question.
First you have to set up Weld (the CDI implementation) to properly integrate it with your ServletContextHandler.
Typically seen like this ...
ServletContextHandler context = new ServletContextHandler();

// Enable Weld + CDI
context.setInitParameter(
    CdiServletContainerInitializer.CDI_INTEGRATION_ATTRIBUTE,
    CdiDecoratingListener.MODE);
context.addBean(
    new ServletContextHandler.Initializer(context,
        new CdiServletContainerInitializer()));
context.addBean(
    new ServletContextHandler.Initializer(context,
        new org.jboss.weld.environment.servlet.EnhancedListener()));
Then the injection (actually decoration) is automatically taken care of internally between Jetty and Weld.
Note: the ServletContextHandler.Initializer is a convenience class that allows your embedded Jetty to run an arbitrary javax.servlet.ServletContainerInitializer without all of the overhead of a full blown WebApp and its complex initialization process.
The CdiServletContainerInitializer is a ServletContainerInitializer provided by Jetty which sets up various things in the ServletContext to allow Weld to wire itself up properly to the ServletContext.
The EnhancedListener is also a ServletContainerInitializer, provided by Weld, which does its side of the wiring up for Weld + CDI.
For serving static files, you'll want to have a "Base Resource" defined in your ServletContextHandler and then add the DefaultServlet to the "default" url-pattern of "/".
ServletContextHandler context = new ServletContextHandler();
context.setBaseResource(Resource.newResource(webRootUri));
context.addServlet(DefaultServlet.class, "/");
If you want to see all of this together, check out the example project at
https://github.com/jetty-project/embedded-jetty-weld
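A rough sketch of how the pieces above could fit together in the question's main() (just combining the snippets from this answer with the question's code; the webroot location is a hypothetical placeholder and nothing here is verified end to end):
import org.eclipse.jetty.cdi.CdiDecoratingListener;
import org.eclipse.jetty.cdi.CdiServletContainerInitializer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.websocket.jsr356.server.deploy.WebSocketServerContainerInitializer;
import javax.websocket.server.ServerContainer;
import java.net.URI;

public class Main {
    public static void main(String[] args) throws Exception {
        Server server = new Server(3081);

        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");

        // static resources (e.g. javascript) served by the DefaultServlet
        URI webRootUri = URI.create("file:///path/to/webroot/"); // hypothetical location
        context.setBaseResource(Resource.newResource(webRootUri));
        context.addServlet(DefaultServlet.class, "/");

        // Weld + CDI wiring, as shown above
        context.setInitParameter(
            CdiServletContainerInitializer.CDI_INTEGRATION_ATTRIBUTE,
            CdiDecoratingListener.MODE);
        context.addBean(new ServletContextHandler.Initializer(context,
            new CdiServletContainerInitializer()));
        context.addBean(new ServletContextHandler.Initializer(context,
            new org.jboss.weld.environment.servlet.EnhancedListener()));

        // JSR-356 websocket endpoint from the question
        ServerContainer serverContainer = WebSocketServerContainerInitializer.configureContext(context);
        serverContainer.addEndpoint(WebsocketService.class);

        server.setHandler(context);
        server.start();
        server.join();
    }
}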

Spring Kafka. Not started EmbeddedKafkaBroker

I am coding a Kafka Broker and Consumer to catch messages from the application. When trying to get messages from the Consumer, an error occurs:
java.net.ConnectException: Connection refused: no further information
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
at org.apache.kafka.common.network.PlaintextTransportLayer.finishConnect(PlaintextTransportLayer.java:50)
at org.apache.kafka.common.network.KafkaChannel.finishConnect(KafkaChannel.java:216)
at org.apache.kafka.common.network.Selector.pollSelectionKeys(Selector.java:531)
at org.apache.kafka.common.network.Selector.poll(Selector.java:483)
at org.apache.kafka.clients.NetworkClient.poll(NetworkClient.java:540)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:262)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:233)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:212)
at org.apache.kafka.clients.consumer.internals.AbstractCoordinator.ensureCoordinatorReady(AbstractCoordinator.java:230)
at org.apache.kafka.clients.consumer.internals.ConsumerCoordinator.poll(ConsumerCoordinator.java:444)
at org.apache.kafka.clients.consumer.KafkaConsumer.updateAssignmentMetadataIfNeeded(KafkaConsumer.java:1267)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1231)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1211)
at org.springframework.kafka.test.utils.KafkaTestUtils.getRecords(KafkaTestUtils.java:303)
at org.springframework.kafka.test.utils.KafkaTestUtils.getRecords(KafkaTestUtils.java:280)
On the application side (Producer), there is also a connection error
2020-03-25 12:29:33.689 WARN 25786 --- [ad | producer-1] org.apache.kafka.clients.NetworkClient : [Producer clientId=producer-1, transactionalId=tx0] Connection to node -1 (<here broker hostname>:9092) could not be established. Broker may not be available.
My project has the following dependencies:
compile "org.springframework.kafka:spring-kafka-test:2.4.4.RELEASE"
compile "org.springframework.kafka:spring-kafka:2.4.4.RELEASE"
Code of My Kafka Broker
public class KafkaServer {

    private static final String BROKERPORT = "9092";
    private static final String BROKERHOST = "localhost";
    public static final String TOPIC1 = "fss-fsstransdata";
    public static final String TOPIC2 = "fss-fsstransscores";
    public static final String TOPIC3 = "fss-fsstranstimings";
    public static final String TOPIC4 = "fss-fssdevicedata";

    @Getter
    private Consumer<String, String> consumer;

    private EmbeddedKafkaBroker embeddedKafkaBroker;

    public void run() {
        String[] topics = {TOPIC1, TOPIC2, TOPIC3, TOPIC4};
        this.embeddedKafkaBroker = new EmbeddedKafkaBroker(
                1,
                false,
                1,
                topics
        ).kafkaPorts(BROKERPORT);

        Map<String, Object> configs = new HashMap<>(KafkaTestUtils.consumerProps("consumer", "false", this.embeddedKafkaBroker));
        this.consumer = new DefaultKafkaConsumerFactory<>(configs, new StringDeserializer(), new StringDeserializer()).createConsumer();
        this.consumer.subscribe(Arrays.asList(topics));
    }
}
Please help me deal with this situation. I am not very familiar with Kafka architecture and how it can be implemented with Spring.
The EmbeddedKafkaBroker is designed to be used from a Spring application context, by a JUnit4 @Rule or @ClassRule, or by a JUnit5 condition.
To use it outside those environments, you must call afterPropertiesSet() to initialize it and destroy() to shut it down.
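Applied to the run() method from the question, a minimal sketch would be (only the two lifecycle calls mentioned above are added; the port is written as an int literal instead of the question's String constant):
public void run() {
    String[] topics = {TOPIC1, TOPIC2, TOPIC3, TOPIC4};
    this.embeddedKafkaBroker = new EmbeddedKafkaBroker(1, false, 1, topics)
            .kafkaPorts(9092);
    // actually start Zookeeper and the broker before creating the consumer
    this.embeddedKafkaBroker.afterPropertiesSet();

    Map<String, Object> configs =
            new HashMap<>(KafkaTestUtils.consumerProps("consumer", "false", this.embeddedKafkaBroker));
    this.consumer = new DefaultKafkaConsumerFactory<>(configs,
            new StringDeserializer(), new StringDeserializer()).createConsumer();
    this.consumer.subscribe(Arrays.asList(topics));
}

public void stop() {
    // shut the broker down when finished
    this.embeddedKafkaBroker.destroy();
}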
If you are using Spring, then you need to annotate your test/configuration with @EmbeddedKafka and then @Autowire the EmbeddedKafkaBroker.
Example embedded Kafka annotation configuration:
@EmbeddedKafka(
        partitions = 1,
        controlledShutdown = false,
        brokerProperties = {
                // place your properties here
        })
What I would do is create a Spring bean KafkaServerConfig and place all my configuration and bean construction logic inside it.
PS: it should be noted that EmbeddedKafkaBroker is intended for unit tests.
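A rough sketch of such a KafkaServerConfig (the init/destroy method names are simply the afterPropertiesSet()/destroy() calls mentioned above; the topics are copied from the question and the single-broker options are illustrative):
@Configuration
public class KafkaServerConfig {

    @Bean(initMethod = "afterPropertiesSet", destroyMethod = "destroy")
    public EmbeddedKafkaBroker embeddedKafkaBroker() {
        // one broker, no controlled shutdown, one partition per topic
        return new EmbeddedKafkaBroker(1, false, 1,
                "fss-fsstransdata", "fss-fsstransscores", "fss-fsstranstimings", "fss-fssdevicedata")
                .kafkaPorts(9092);
    }
}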

Exception: org.springframework.messaging.MessageDeliveryException: Dispatcher has no subscribers for channel

I have a sandbox for exploring newly added functions in Spring Cloud Stream, but I've faced a problem with using Function and Supplier in one Spring Cloud Stream application.
In the code I used the examples described in the docs.
First I added a Function<String, String> to the project, with the corresponding spring.cloud.stream.bindings and spring.cloud.stream.function.definition properties in application.yml. Everything worked fine: I posted a message to the my-fun-in Kafka topic, the application executed the function and sent the result to the my-fun-out topic.
Then I added a Supplier<Flux<String>> to the same project with the corresponding spring.cloud.stream.bindings and updated the spring.cloud.stream.function.definition value to fun;sup. And here the weird things start to happen. When I try to start the application I receive the following error:
2020-01-15 01:45:16.608 ERROR 10128 --- [oundedElastic-1] o.s.integration.handler.LoggingHandler : org.springframework.messaging.MessageDeliveryException: Dispatcher has no subscribers for channel 'application.sup-out-0'.; nested exception is org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers, failedMessage=GenericMessage [payload=byte[20], headers={contentType=application/json, id=89301e00-b285-56e0-cb4d-8133555c8905, timestamp=1579045516603}], failedMessage=GenericMessage [payload=byte[20], headers={contentType=application/json, id=89301e00-b285-56e0-cb4d-8133555c8905, timestamp=1579045516603}]
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:77)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:453)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:403)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:109)
at org.springframework.integration.router.AbstractMessageRouter.doSend(AbstractMessageRouter.java:206)
at org.springframework.integration.router.AbstractMessageRouter.handleMessageInternal(AbstractMessageRouter.java:188)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:170)
at org.springframework.integration.handler.AbstractMessageHandler.onNext(AbstractMessageHandler.java:219)
at org.springframework.integration.handler.AbstractMessageHandler.onNext(AbstractMessageHandler.java:57)
at org.springframework.integration.endpoint.ReactiveStreamsConsumer$DelegatingSubscriber.hookOnNext(ReactiveStreamsConsumer.java:165)
at org.springframework.integration.endpoint.ReactiveStreamsConsumer$DelegatingSubscriber.hookOnNext(ReactiveStreamsConsumer.java:148)
at reactor.core.publisher.BaseSubscriber.onNext(BaseSubscriber.java:160)
at reactor.core.publisher.FluxDoFinally$DoFinallySubscriber.onNext(FluxDoFinally.java:123)
at reactor.core.publisher.EmitterProcessor.drain(EmitterProcessor.java:426)
at reactor.core.publisher.EmitterProcessor.onNext(EmitterProcessor.java:268)
at reactor.core.publisher.FluxCreate$BufferAsyncSink.drain(FluxCreate.java:793)
at reactor.core.publisher.FluxCreate$BufferAsyncSink.next(FluxCreate.java:718)
at reactor.core.publisher.FluxCreate$SerializedSink.next(FluxCreate.java:153)
at org.springframework.integration.channel.FluxMessageChannel.doSend(FluxMessageChannel.java:63)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:453)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:403)
at org.springframework.integration.channel.FluxMessageChannel.lambda$subscribeTo$2(FluxMessageChannel.java:83)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:189)
at reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.runAsync(FluxPublishOn.java:398)
at reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.run(FluxPublishOn.java:484)
at reactor.core.scheduler.WorkerTask.call(WorkerTask.java:84)
at reactor.core.scheduler.WorkerTask.call(WorkerTask.java:37)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: org.springframework.integration.MessageDispatchingException: Dispatcher has no subscribers, failedMessage=GenericMessage [payload=byte[20], headers={contentType=application/json, id=89301e00-b285-56e0-cb4d-8133555c8905, timestamp=1579045516603}]
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:139)
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:73)
... 34 more
After that I tried several things:
Reverted spring.cloud.stream.function.definition to fun (disable sup bean binding to the external destination). Application started, function worked, supplier didn't work. Everything as expected.
Changed spring.cloud.stream.function.definition to sup (disable fun bean binding to the external destination). Application started, function didn't work, supplier worked (produced message to my-sup-out topic every second). Everything as expected as well.
Updated spring.cloud.stream.function.definition value to fun;sup. Application didn't start, got same MessageDeliveryException.
Swapped spring.cloud.stream.function.definition value to sup;fun. Application started, supplier worked, but function didn't work (didn't send messages to my-fun-out topic).
The last one confused me even more than the error. So now I need someone's help to sort things out.
Did I miss something in the configuration? Why does changing the order of beans separated by ; in spring.cloud.stream.function.definition lead to different results?
Full project is uploaded to GitHub and added below:
StreamApplication.java:
package com.kaine;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import reactor.core.publisher.Flux;
import java.util.function.Function;
import java.util.function.Supplier;

@SpringBootApplication
public class StreamApplication {

    public static void main(String[] args) {
        SpringApplication.run(StreamApplication.class);
    }

    @Bean
    public Function<String, String> fun() {
        return value -> value.toUpperCase();
    }

    @Bean
    public Supplier<Flux<String>> sup() {
        return () -> Flux.from(emitter -> {
            while (true) {
                try {
                    emitter.onNext("Hello from Supplier!");
                    Thread.sleep(1000);
                } catch (Exception e) {
                    // ignore
                }
            }
        });
    }
}
application.yml
spring:
  cloud:
    stream:
      function:
        definition: fun;sup
      bindings:
        fun-in-0:
          destination: my-fun-in
        fun-out-0:
          destination: my-fun-out
        sup-out-0:
          destination: my-sup-out
build.gradle.kts:
plugins {
    java
}

group = "com.kaine"
version = "1.0-SNAPSHOT"

repositories {
    mavenCentral()
}

dependencies {
    implementation(platform("org.springframework.cloud:spring-cloud-dependencies:Hoxton.SR1"))
    implementation("org.springframework.cloud:spring-cloud-starter-stream-kafka")
    implementation(platform("org.springframework.boot:spring-boot-dependencies:2.2.2.RELEASE"))
}

configure<JavaPluginConvention> {
    sourceCompatibility = JavaVersion.VERSION_11
}
Actually this is a problem with our documentation, as I believe we provide a bad example of the reactive Supplier for this case. The issue is that your Supplier is in an infinite blocking loop; it basically never returns.
So please change it to something like:
@Bean
public Supplier<Flux<String>> sup() {
    return () -> Flux.fromStream(Stream.generate(new Supplier<String>() {
        @Override
        public String get() {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            return "Hello from Supplier";
        }
    })).subscribeOn(Schedulers.elastic()).share();
}

How do I configure this property with Spring Boot and an embedded Tomcat?

Do I configure properties like the connectionTimeout in the application.properties file, or is there somewhere else to do it? I can't figure this out from Google.
Tomcat properties list
I found this Spring-Boot example, but it does not include a connectionTimeout property, and when I set server.tomcat.connectionTimeout=60000 in my application.properties file I get an error.
Spring Boot 1.4 and later
As of Spring Boot 1.4 you can use the property server.connection-timeout. See Spring Boot's common application properties.
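For example, in application.properties (the value is in milliseconds, 60000 being the timeout the question asked about):
server.connection-timeout=60000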
Spring Boot 1.3 and earlier
Provide a customized EmbeddedServletContainerFactory bean:
@Bean
public EmbeddedServletContainerFactory servletContainerFactory() {
    TomcatEmbeddedServletContainerFactory factory = new TomcatEmbeddedServletContainerFactory();
    factory.addConnectorCustomizers(connector ->
            ((AbstractProtocol) connector.getProtocolHandler()).setConnectionTimeout(10000));
    // configure some more properties
    return factory;
}
If you are not using Java 8 or don't want to use Lambda Expressions, add the TomcatConnectorCustomizer like this:
factory.addConnectorCustomizers(new TomcatConnectorCustomizer() {
    @Override
    public void customize(Connector connector) {
        ((AbstractProtocol) connector.getProtocolHandler()).setConnectionTimeout(10000);
    }
});
The setConnectionTimeout() method expects the timeout in milliseconds (see connectionTimeout in Apache Tomcat 8 Configuration Reference).
I prefer setting system properties before the server starts:
/**
 * Start SpringBoot server
 */
@SpringBootApplication(scanBasePackages = {"com.your.conf.package"})
//@ComponentScan(basePackages = "com.your.conf.package")
public class Application {

    public static void main(String[] args) throws Exception {
        System.setProperty("server.port", "8132");
        System.setProperty("server.tomcat.max-threads", "200");
        System.setProperty("server.connection-timeout", "60000");
        ApplicationContext ctx = SpringApplication.run(Application.class, args);
    }
}
With Spring Boot 2.x and later, the way the embedded Tomcat is customized has changed.
Refer to the code below.
import org.apache.coyote.http11.AbstractHttp11Protocol;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.server.WebServerFactoryCustomizer;
import org.springframework.context.annotation.Configuration;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Configuration
public class TomcatCustomizer implements WebServerFactoryCustomizer<TomcatServletWebServerFactory> {

    @Autowired
    private ContainerProperties containerProperties;

    @Override
    public void customize(TomcatServletWebServerFactory factory) {
        factory.addConnectorCustomizers(connector -> {
            AbstractHttp11Protocol protocol = (AbstractHttp11Protocol) connector.getProtocolHandler();
            protocol.setMaxKeepAliveRequests(10);
            log.info("####################################################################################");
            log.info("#");
            log.info("# TomcatCustomizer");
            log.info("#");
            log.info("# custom maxKeepAliveRequests {}", protocol.getMaxKeepAliveRequests());
            log.info("# origin keepalive timeout: {} ms", protocol.getKeepAliveTimeout());
            log.info("# keepalive timeout: {} ms", protocol.getKeepAliveTimeout());
            log.info("# connection timeout: {} ms", protocol.getConnectionTimeout());
            log.info("# max connections: {}", protocol.getMaxConnections());
            log.info("#");
            log.info(
                "####################################################################################");
        });
    }
}
It's actually supposed to be server.connection-timeout in your application.properties. Reference, I suggest you bookmark it.
