How can multiple users send messages with RabbitMQ using Spring Boot (Java)?

My aim: I have multiple jobs (processes) running in parallel (separate threads). I want to implement messaging so that each process can send a message (if required) to the RabbitMQ server.
Right now I have this:
@Configuration
public class SenderConfiguration {

    private String host = "";
    private String port = "";
    private String userName = "";
    private String password = "";
    private String queueName = "";

    public SenderConfiguration() {
        init();
    }

    private void init() {
        Properties prop = new Properties();
        // read the messaging settings from the configuration file referenced by R.CONFIGURATION_FILE_PATH
        try (InputStream input = new FileInputStream(R.CONFIGURATION_FILE_PATH)) {
            prop.load(input);
            host = prop.getProperty("messaging.host");
            port = prop.getProperty("messaging.port");
            userName = prop.getProperty("messaging.userName");
            password = prop.getProperty("messaging.password");
            queueName = prop.getProperty("messaging.queue");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Bean
    public RabbitTemplate rabbitTemplate() {
        RabbitTemplate template = new RabbitTemplate(connectionFactory());
        // with the default exchange the routing key doubles as the queue name
        template.setRoutingKey(this.queueName);
        return template;
    }

    @Bean
    public ConnectionFactory connectionFactory() {
        CachingConnectionFactory connectionFactory = new CachingConnectionFactory(this.host);
        connectionFactory.setPort(Integer.parseInt(port));
        connectionFactory.setUsername(userName);
        connectionFactory.setPassword(password);
        return connectionFactory;
    }

    @Bean
    public ScheduledProducer scheduledProducer() {
        return new ScheduledProducer();
    }

    @Bean
    public BeanPostProcessor postProcessor() {
        return new ScheduledAnnotationBeanPostProcessor();
    }

    static class ScheduledProducer {

        @Autowired
        private volatile RabbitTemplate rabbitTemplate;

        private final AtomicInteger counter = new AtomicInteger();

        // scheduled methods must take no arguments
        @Scheduled(fixedRate = 1000)
        public void sendMessage() {
            rabbitTemplate.convertAndSend("Roxy " + counter.incrementAndGet());
        }
    }
}
and to call this from one of my operations:
new AnnotationConfigApplicationContext(SenderConfiguration.class);
Shall I make it an abstract class and have every operation/process extend it? What would be the best approach?
And can I make the above process any better?

Just use a single class with property placeholders...
Use
@Value("${messaging.host}")
String host;
etc.
No need for a subclass for each.
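For reference, a minimal sketch of that single-class approach could look like this (assuming the properties file is registered with Spring's Environment, e.g. via @PropertySource; the config.file.path placeholder is only an example, and the property keys are the ones from the question):
@Configuration
@PropertySource("file:${config.file.path}") // example location, not from the original post
public class SenderConfiguration {

    @Value("${messaging.host}")
    private String host;

    @Value("${messaging.port}")
    private int port;

    @Value("${messaging.userName}")
    private String userName;

    @Value("${messaging.password}")
    private String password;

    @Value("${messaging.queue}")
    private String queueName;

    @Bean
    public ConnectionFactory connectionFactory() {
        CachingConnectionFactory connectionFactory = new CachingConnectionFactory(host);
        connectionFactory.setPort(port);
        connectionFactory.setUsername(userName);
        connectionFactory.setPassword(password);
        return connectionFactory;
    }

    @Bean
    public RabbitTemplate rabbitTemplate() {
        RabbitTemplate template = new RabbitTemplate(connectionFactory());
        template.setRoutingKey(queueName); // default exchange: routing key = queue name
        return template;
    }
}
Each worker thread can then have the single RabbitTemplate injected and call convertAndSend() directly, instead of building a new AnnotationConfigApplicationContext per operation.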

Related

Initialize class with static method not managed by Spring

I have a class called ConfigManagement which uses only static methods/fields. One of the static methods, initializeConfig(), takes a Properties object (pointing at application.properties) as input, populates the fields, and calls some other methods with the values from the application.properties file.
public class ConfigManagement {
private static String signatureAlgorithm;
private static String myName;
private static RSAPublicKey myPublicKey;
private static RSAPrivateKey myPrivateKey;
private static HashMap<String, RSAPublicKey> peerPubKeys = new HashMap<String, RSAPublicKey>();
private static boolean isInitialized = false;
/**
* @return the signatureAlgorithm
*/
public static void initializeConfig(Properties props) {
signatureAlgorithm = props.getProperty("cybertrust.crypto.signatureAlgorithm");
myName = props.getProperty("cybertrust.crypto.myName");
try {
try {
myPublicKey = Loader.getPublicKeyFromCertificateFile(props.getProperty("cybertrust.crypto.myCertificate"));
}
catch (Exception e) {
throw new IllegalStateException("cybertrust.crypto.myCertificate is empty, the file is not found or it contains invalid data");
}
try {
myPrivateKey = Loader.getPrivateKeyFromFile(props.getProperty("cybertrust.crypto.myPrivateKey"));
}
catch (Exception e) {
throw new IllegalStateException("cybertrust.crypto.myPrivateKey is empty, the file is not found or it contains invalid data");
}
peerPubKeys.put(myName, myPublicKey);
int peerCounter = 0;
do {
String peerNameProp = String.format("cybertrust.crypto.peerModules.%d.name", peerCounter);
String peerName = props.getProperty(peerNameProp);
if (peerName == null)
break;
String peerNameCertFileProp = String.format("cybertrust.crypto.peerModules.%d.certificate", peerCounter);
String peerNameCertFile = props.getProperty(peerNameCertFileProp);
if (peerNameCertFile == null) // Do not halt the program, produce though an error
Logger.getLogger("ConfigManagement").log(Level.SEVERE,
String.format("Property %s not found while property %s is defined", peerNameCertFile, peerNameProp));
// instantiate public key from file
try {
RSAPublicKey peerRsaPubKey = Loader.getPublicKeyFromCertificateFile(peerNameCertFile);
peerPubKeys.put(peerName, peerRsaPubKey);
}
catch (Exception e) {
Logger.getLogger("ConfigManagement").log(Level.SEVERE,
String.format("File %s specified in property %s not found or does not contains a valid RSA key", peerNameCertFile, peerNameCertFileProp)); }
peerCounter++;
} while (true);
}
catch (Exception e) {
throw(e);
}
if ((myPublicKey == null) || (signatureAlgorithm == null) || (myName == null))
throw new IllegalStateException("one of the properties cybertrust.crypto.signatureAlgorithm, cybertrust.crypto.myName, cybertrust.crypto.myPublicKey, cybertrust.crypto.myPrivateKey is not defined");
isInitialized = true;
}
private static void testInitialized() {
if (!isInitialized)
throw new IllegalStateException("The configuration has not been initialized");
}
public static String getSignatureAlgorithm() {
testInitialized();
return signatureAlgorithm;
}
/**
* @return the myName
*/
public static String getMyName() {
testInitialized();
return myName;
}
/**
* @return the myPublicKey
*/
public static RSAPublicKey getMyPublicKey() {
testInitialized();
return myPublicKey;
}
/**
* @return the myPrivateKey
*/
public static RSAPrivateKey getMyPrivateKey() {
testInitialized();
return myPrivateKey;
}
public static RSAPublicKey getPublicKey(String peerName) throws NoSuchElementException {
testInitialized();
RSAPublicKey result = peerPubKeys.get(peerName);
if (result == null)
throw new NoSuchElementException("No known key for module " + peerName);
else
return result;
}
}
The application.properties file looks something like this:
cybertrust.crypto.myName=tms1235.cybertrust.eu
cybertrust.crypto.myCertificate=tms1235.cert.pem
cybertrust.crypto.myPrivateKey=tms1235.key.pem
cybertrust.crypto.signatureAlgorithm=SHA256withRSA
cybertrust.crypto.peerModules.0.name=sga1234.cybertrust.eu
cybertrust.crypto.peerModules.0.certificate=sga1234.cert.pem
cybertrust.crypto.peerModules.1.name=tms1234.cybertrust.eu
cybertrust.crypto.peerModules.1.certificate=tms1234.cert.pem
In a simple Java project I run ConfigManagement.initializeConfig(props); in main() and the fields are initialized and I can use the rest of the methods. In Spring it's not that simple.
I am trying to integrate this code into a Spring Boot application and I don't know how/where to initialize this class.
I am posting the Spring configuration for reference:
@Configuration
@EnableWebMvc
@EnableTransactionManagement
@ComponentScan("com.cybertrust.tms")
//@PropertySource({ "classpath:persistence-mysql.properties" })
@PropertySource({ "classpath:model.properties" })
public class DemoAppConfig implements WebMvcConfigurer {
@Autowired
private Environment env;
private Logger logger = Logger.getLogger(getClass().getName());
// define a bean for ViewResolver
@Bean
public DataSource myDataSource() {
// create connection pool
ComboPooledDataSource myDataSource = new ComboPooledDataSource();
// set the jdbc driver
try {
myDataSource.setDriverClass("com.mysql.cj.jdbc.Driver");
}
catch (PropertyVetoException exc) {
throw new RuntimeException(exc);
}
// for sanity's sake, let's log url and user ... just to make sure we are reading the data
logger.info("jdbc.url=" + env.getProperty("spring.datasource.url"));
logger.info("jdbc.user=" + env.getProperty("spring.datasource.username"));
// set database connection props
myDataSource.setJdbcUrl(env.getProperty("spring.datasource.url"));
myDataSource.setUser(env.getProperty("spring.datasource.username"));
myDataSource.setPassword(env.getProperty("spring.datasource.password"));
// set connection pool props
myDataSource.setInitialPoolSize(getIntProperty("connection.pool.initialPoolSize"));
myDataSource.setMinPoolSize(getIntProperty("connection.pool.minPoolSize"));
myDataSource.setMaxPoolSize(getIntProperty("connection.pool.maxPoolSize"));
myDataSource.setMaxIdleTime(getIntProperty("connection.pool.maxIdleTime"));
return myDataSource;
}
private Properties getHibernateProperties() {
// set hibernate properties
Properties props = new Properties();
props.setProperty("hibernate.dialect", env.getProperty("hibernate.dialect"));
props.setProperty("hibernate.show_sql", env.getProperty("hibernate.show_sql"));
props.setProperty("hibernate.hbm2ddl.auto", env.getProperty("hibernate.hbm2ddl.auto"));
return props;
}
// need a helper method
// read environment property and convert to int
private int getIntProperty(String propName) {
String propVal = env.getProperty(propName);
// now convert to int
int intPropVal = Integer.parseInt(propVal);
return intPropVal;
}
@Bean
public LocalSessionFactoryBean sessionFactory(){
// create session factorys
LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
// set the properties
sessionFactory.setDataSource(myDataSource());
sessionFactory.setPackagesToScan(env.getProperty("hibernate.packagesToScan"));
sessionFactory.setHibernateProperties(getHibernateProperties());
return sessionFactory;
}
@Bean
@Autowired
public HibernateTransactionManager transactionManager(SessionFactory sessionFactory) {
// setup transaction manager based on session factory
HibernateTransactionManager txManager = new HibernateTransactionManager();
txManager.setSessionFactory(sessionFactory);
return txManager;
}
@Bean
public ModelMapper modelMapper() {
return new ModelMapper();
}
@Bean
public ConfigManagement configManagement() {
return new ConfigManagement();
}
}
And the Spring Boot main():
@Configuration
@EnableWebMvc
@EnableTransactionManagement
@ComponentScan("com.cybertrust.tms")
//@PropertySource({ "classpath:persistence-mysql.properties" })
@PropertySource({ "classpath:model.properties" })
//@EnableAutoConfiguration(exclude = HibernateJpaAutoConfiguration.class)
@SpringBootApplication(exclude = {HibernateJpaAutoConfiguration.class})
public class TMS extends SpringBootServletInitializer {
public static void main(String[] args) throws Exception {
SpringApplication.run(TMS.class, args);
}
}
Your static solution won't work in a Spring environment as is, because the static initialization can run before Spring is up and has loaded all beans and properties.
You should rewrite your code the Spring way and get the properties using @Value.
Injecting a property with the @Value annotation is straightforward:
#Value( "${jdbc.url}" )
private String jdbcUrl;
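As a short illustration (a sketch; the bean and property names are only examples), the same injection works in any Spring-managed bean, and a default after the colon avoids startup failures when a property is missing:
@Component
public class DataSourceSettings {

    // resolved from the Environment (e.g. application.properties) when the bean is created
    @Value("${jdbc.url}")
    private String jdbcUrl;

    // falls back to "root" if jdbc.username is not defined
    @Value("${jdbc.username:root}")
    private String jdbcUser;

    public String getJdbcUrl() {
        return jdbcUrl;
    }

    public String getJdbcUser() {
        return jdbcUser;
    }
}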
In order to integrate this code into a Spring project, I had to:
Make the class a bean managed by Spring, by adding this to the configuration file I posted in my question:
@Bean
public ConfigManagement configManagement() {
return new ConfigManagement();
}
Remove the static declaration from the class properties and use the @Value annotation to initialize them from the application.properties file, as suggested by @user7294900.
However, some of the class properties were not primitive types and couldn't be initialized directly from the application.properties. They needed some "business logic" to be run at initialization time. In order to achieve that, I had to remove the static declaration and add the @PostConstruct annotation on the initializeConfig() method, which is the one that handles the initialization of the rest of the properties.
public class ConfigManagement {
#Value("${cybertrust.crypto.signatureAlgorithm}")
private String signatureAlgorithm;
#Value("${cybertrust.crypto.myName}")
private String myName;
#Value("${cybertrust.crypto.myCertificate}")
private String myCertificate;
#Value("${cybertrust.crypto.myPrivateKey}")
private String myPrivateKey;
private RSAPublicKey myRSAPublicKey;
private RSAPrivateKey myRSAPrivateKey;
private HashMap<String, RSAPublicKey> peerPubKeys = new HashMap<String, RSAPublicKey>();
private boolean isInitialized = false;
int peerCounter;
/**
* @return the signatureAlgorithm
*/
public ConfigManagement() {
}
@PostConstruct
public void initializeConfig() throws Exception {
try {
try {
myRSAPublicKey = Loader.getPublicKeyFromCertificateFile("C:\\Users\\Findorgri\\git\\trust-management\\TMS-rest\\" + myCertificate);
}
catch (Exception e) {
throw new IllegalStateException("cybertrust.crypto.myCertificate is empty, the file is not found or it contains invalid data");
}
try {
myRSAPrivateKey = Loader.getPrivateKeyFromFile("C:\\Users\\Findorgri\\git\\trust-management\\TMS-rest\\" + myPrivateKey);
}
catch (Exception e) {
throw new IllegalStateException("cybertrust.crypto.myPrivateKey is empty, the file is not found or it contains invalid data");
}
peerPubKeys.put(myName, myRSAPublicKey);
Properties props = loadProperties("C:\\Users\\Findorgri\\git\\trust-management\\TMS-rest\\src\\main\\resources\\application.properties");
if (props == null) {
throw new Exception("Properties file not found");
}
peerCounter = 0;
do {
String peerNameProp = String.format("cybertrust.crypto.peerModules.%d.name", peerCounter);
String peerName = props.getProperty(peerNameProp);
System.out.println("####TEST####\n" + peerNameProp + "\n" + peerName +"\n####TEST####");
if (peerName == null)
break;
String peerNameCertFileProp = String.format("cybertrust.crypto.peerModules.%d.certificate", peerCounter);
String peerNameCertFile = props.getProperty(peerNameCertFileProp);
System.out.println("####TEST####\n" + peerNameCertFileProp + "\n" + peerNameCertFile +"\n####TEST####");
if (peerNameCertFile == null) // Do not halt the program, produce though an error
Logger.getLogger("ConfigManagement").log(Level.SEVERE,
String.format("Property %s not found while property %s is defined", peerNameCertFile, peerNameProp));
// instantiate public key from file
try {
RSAPublicKey peerRsaPubKey = Loader.getPublicKeyFromCertificateFile("C:\\Users\\Findorgri\\git\\trust-management\\TMS-rest\\" + peerNameCertFile);
peerPubKeys.put(peerName, peerRsaPubKey);
}
catch (Exception e) {
Logger.getLogger("ConfigManagement").log(Level.SEVERE,
String.format("File %s specified in property %s not found or does not contains a valid RSA key", peerNameCertFile, peerNameCertFileProp)); }
peerCounter++;
} while (true);
}
catch (Exception e) {
throw(e);
}
if ((myRSAPublicKey == null) || (signatureAlgorithm == null) || (myName == null))
throw new IllegalStateException("one of the properties cybertrust.crypto.signatureAlgorithm, cybertrust.crypto.myName, cybertrust.crypto.myPublicKey, cybertrust.crypto.myPrivateKey is not defined");
isInitialized = true;
peerPubKeys.forEach((key, value) -> System.out.println(key + ":" + value));
}
....
Finally, for completeness' sake, for the initializeConfig() method to have access to the application.properties file, I had to use this method:
private static Properties loadProperties(String fileName) throws IOException {
FileInputStream fis = null;
Properties prop = null;
try {
fis = new FileInputStream(fileName);
prop = new Properties();
prop.load(fis);
} catch(FileNotFoundException fnfe) {
fnfe.printStackTrace();
} catch(IOException ioe) {
ioe.printStackTrace();
} finally {
if (fis != null) {
fis.close();
}
}
return prop;
}
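As a side note (a sketch, not part of the original answer): the same properties can usually be read from the classpath instead of an absolute path, which avoids the machine-specific C:\Users\... prefix:
private static Properties loadPropertiesFromClasspath(String fileName) throws IOException {
    Properties prop = new Properties();
    // in a Spring Boot project, application.properties is already on the classpath
    try (InputStream in = ConfigManagement.class.getClassLoader().getResourceAsStream(fileName)) {
        if (in == null) {
            throw new FileNotFoundException(fileName + " not found on the classpath");
        }
        prop.load(in);
    }
    return prop;
}
It would be called as loadPropertiesFromClasspath("application.properties") from initializeConfig().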

Socket Server via Singleton with parameters from properties file

I am going to run a socket server via the Singleton pattern because I have multiple threads, and every time I call it I want to use the same server socket. This is the SocketSingleton.java class:
public class SocketSingleton {
private static ServerSocket serverSocket = null;
private SocketSingleton() {}
public static synchronized ServerSocket getServerSocket() throws IOException {
PropertiesKey prop = new PropertiesKey();
if (serverSocket == null) {
serverSocket = new ServerSocket(prop.getSocketPort());
}
return serverSocket;
}
}
But I need to get a few values from configuration.properties, like SOCKET_PORT=2203.
I can get the values from the configuration with the code below:
public class PropertiesAlgorithmImpl implements PropertiesAlgorithm {
private static Properties defaultProps = new Properties();
static {
try {
String propertiesDirectory = "src/main/resources/configuration.properties";
FileInputStream in = new FileInputStream(propertiesDirectory);
if (in == null) {
System.out.println("Sorry, unable to find " + propertiesDirectory);
}
defaultProps.load(in);
in.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public String getValuesFromProperties(String key) {
if (defaultProps.getProperty(key) != null) {
return defaultProps.getProperty(key);
}
return "Sorry, unable to find " + key ;
}
}
This is the enum for the socket port:
public enum CONFIG { SOCKET_PORT}
public class PropertiesKey {
private PropertiesAlgorithm propertiesAlgorithm;
public int getSocketPort(){
propertiesAlgorithm = new PropertiesAlgorithmImpl();
return Integer.parseInt(propertiesAlgorithm.getValuesFromProperties(CONFIG.SOCKET_PORT.name()));
}
}
In the SocketSingleton class, I am getting the socket port like this:
serverSocket = new ServerSocket(prop.getSocketPort());
What is the possible reason that I can't get the socket port parameters from configuration.properties?
I fixed the class below using an input stream and it worked:
public class PropertiesAlgorithmImpl implements PropertiesAlgorithm {
private static final Logger logger = LoggerFactory.getLogger(PropertiesAlgorithmImpl.class);
private static Properties defaultProps = new Properties();
static {
String propertiesDirectory = "config.properties";
try (InputStream input = PropertiesAlgorithmImpl.class.getClassLoader().getResourceAsStream(propertiesDirectory)) {
if (input == null) {
logger.info("Sorry, unable to find " + propertiesDirectory);
} else {
defaultProps.load(input);
}
// no explicit close() needed; try-with-resources closes the stream
} catch (Exception e) { logger.error("Failed to load " + propertiesDirectory, e); }
}
public String getValuesFromProperties(String key) {
if (defaultProps.getProperty(key) != null) {
return defaultProps.getProperty(key);
}
logger.info("Sorry, unable to find " + key);
return "Sorry, unable to find " + key ;
}

How to use AmazonSQS listener with two accounts

I have an application with two worker classes. I want them to pull from AWS SQS, but from two different accounts.
I am using @SqsListener to achieve this. I am having trouble setting the right AmazonSQS client for each queue. I tried to use a custom DestinationResolver, but again it cannot access the right AmazonSQS client bean.
I'm using AmazonSQSAsync; maybe this is part of the problem. With the custom destination resolver I am getting access denied for one of the queues.
My config code:
@Bean(destroyMethod = "shutdown")
@Primary
public AmazonSQSAsync amazonSQS() {
AmazonSQSAsync amazonSQSAsyncClient = new AmazonSQSAsyncClient(new AWSCredentialsProvider() {
public void refresh() {}
public AWSCredentials getCredentials() {
return new AWSCredentials() {
public String getAWSSecretKey() {return secretKey;}
public String getAWSAccessKeyId() {return accessKey;}
};
}
});
QueueBufferConfig config = new QueueBufferConfig();
config.setMaxBatchOpenMs(maxBatchOpenMs);
config.setMaxBatchSize(maxBatchSize);
LOGGER.info("SQS Client Initialized Successfully");
return new AmazonSQSBufferedAsyncClient(amazonSQSAsyncClient, config);
}
@Bean(destroyMethod = "shutdown")
@Qualifier("workerSQS")
public AmazonSQSAsync workerSQS() {
final ClientConfiguration cc = new ClientConfiguration();
cc.setConnectionTimeout(listenerConnectionTimeout);
cc.setSocketTimeout(listenerSocketTimeout);
cc.setMaxConnections(listenerMaxConnection);
cc.setRequestTimeout(listenerRequestTimeout);
cc.setUseReaper(true);
//cc.setConnectionMaxIdleMillis();
AWSCredentialsProvider awsCredentialsProvider = new AWSCredentialsProvider() {
public void refresh() {}
public AWSCredentials getCredentials() {
return new AWSCredentials() {
public String getAWSSecretKey() {return routingSecretKey;}
public String getAWSAccessKeyId() {return routingAccessKey;}
};
}
};
AmazonSQSAsync amazonSQSAsyncClient = AmazonSQSAsyncClientBuilder.standard()
.withCredentials(awsCredentialsProvider)
.withRegion(Regions.US_EAST_1)
.withClientConfiguration(cc)
.build();
// See https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-client-side-buffering-request-batching.html
// for QueueBufferConfig Configuration Parameters
QueueBufferConfig config = new QueueBufferConfig();
config.setLongPoll(true);
return new AmazonSQSBufferedAsyncClient(amazonSQSAsyncClient, config);
}
@Bean
public SimpleMessageListenerContainerFactory simpleMessageListenerContainerFactory() {
SimpleMessageListenerContainerFactory msgListenerContainerFactory = new SimpleMessageListenerContainerFactory();
msgListenerContainerFactory.setBackOffTime(listenerBackOffTime);
msgListenerContainerFactory.setWaitTimeOut(listenerWaitTimeOut);
msgListenerContainerFactory.setVisibilityTimeout(listenerVisibilityTimeOut);
msgListenerContainerFactory.setMaxNumberOfMessages(listenerMaxMessagesPerPoll);
msgListenerContainerFactory.setDestinationResolver(destinationResolver());
return msgListenerContainerFactory;
}
@Bean
public CustomDestinationResolver destinationResolver(){
return new CustomDestinationResolver();
}
@Component
public static class CustomDestinationResolver implements DestinationResolver{
@Autowired
private AmazonSQS amazonSQS;
@Autowired
@Qualifier("workerSQS")
private AmazonSQSAsync amazonSQSAsync;
@Override
public String resolveDestination(String name) throws DestinationResolutionException {
String queueName = name;
if (queueName.startsWith("tl")) {
try {
GetQueueUrlResult getQueueUrlResult = amazonSQSAsync.getQueueUrl(new GetQueueUrlRequest(name));
return getQueueUrlResult.getQueueUrl();
} catch (QueueDoesNotExistException var4) {
throw new DestinationResolutionException(var4.getMessage(), var4);
}
} else {
try {
GetQueueUrlResult getQueueUrlResult = amazonSQS.getQueueUrl(new GetQueueUrlRequest(name));
return getQueueUrlResult.getQueueUrl();
} catch (QueueDoesNotExistException var4) {
throw new DestinationResolutionException(var4.getMessage(), var4);
}
}
}
}
I was not able to do it with the SQS listener, so I tried with a JMS listener and it worked.
I simply created two JMS listener container factories and used them. Each listener has a different AWS account.
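The answer above does not include code; a rough sketch of that setup with the amazon-sqs-java-messaging-lib JMS adapter could look like the following (the bean names, property keys and queue names are placeholders, not taken from the original post):
@Configuration
@EnableJms
public class SqsJmsConfig {

    // one JMS ConnectionFactory per AWS account
    private SQSConnectionFactory connectionFactoryFor(String accessKey, String secretKey) {
        AmazonSQS sqs = AmazonSQSClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)))
                .build();
        return new SQSConnectionFactory(new ProviderConfiguration(), sqs);
    }

    @Bean
    public DefaultJmsListenerContainerFactory accountOneFactory(
            @Value("${account1.accessKey}") String accessKey,
            @Value("${account1.secretKey}") String secretKey) {
        DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
        factory.setConnectionFactory(connectionFactoryFor(accessKey, secretKey));
        return factory;
    }

    @Bean
    public DefaultJmsListenerContainerFactory accountTwoFactory(
            @Value("${account2.accessKey}") String accessKey,
            @Value("${account2.secretKey}") String secretKey) {
        DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
        factory.setConnectionFactory(connectionFactoryFor(accessKey, secretKey));
        return factory;
    }
}
Each worker then binds to its own factory, e.g. @JmsListener(destination = "worker-one-queue", containerFactory = "accountOneFactory").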

Spring Integration FTP remove local files after use (Spring Boot)

I am trying to write a program that can take a file from one server via FTP and place it on another server via FTP. However, I am having issues deleting the local file after it has been written. Being able to save it locally is not an issue as long as it is temporary. I have tried using an ExpressionEvaluatingRequestHandlerAdvice with an OnSuccessExpression and I could not get it to actually use the expression. The code is here:
@Configuration
@EnableConfigurationProperties(FTPConnectionProperties.class)
public class FTPConfiguration {
private FTPConnectionProperties ftpConnectionProperties;
public FTPConfiguration(FTPConnectionProperties ftpConnectionProperties) {
this.ftpConnectionProperties = ftpConnectionProperties;
}
@Bean
public SessionFactory<FTPFile> ftpInputSessionFactory() {
DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
sf.setHost(ftpConnectionProperties.getInputServer());
sf.setUsername(ftpConnectionProperties.getInputFtpUser());
sf.setPassword(ftpConnectionProperties.getInputFtpPassword());
return new CachingSessionFactory<>(sf);
}
@Bean
public SessionFactory<FTPFile> ftpOutputSessionFactory() {
DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
sf.setHost(ftpConnectionProperties.getOutputServer());
sf.setUsername(ftpConnectionProperties.getOutputFtpUser());
sf.setPassword(ftpConnectionProperties.getOutputFtpPassword());
return new CachingSessionFactory<>(sf);
}
@Bean
public FtpInboundFileSynchronizer ftpInboundFileSynchronizer() {
FtpInboundFileSynchronizer fileSynchronizer = new FtpInboundFileSynchronizer(ftpInputSessionFactory());
fileSynchronizer.setDeleteRemoteFiles(true);
fileSynchronizer.setRemoteDirectory(ftpConnectionProperties.getInputDirectory());
fileSynchronizer.setFilter(new FtpSimplePatternFileListFilter("*.TIF"));
return fileSynchronizer;
}
@Bean
@InboundChannelAdapter(channel = "input", poller = @Poller(fixedDelay = "5000"))
public MessageSource<File> ftpMessageSource() {
FtpInboundFileSynchronizingMessageSource source = new FtpInboundFileSynchronizingMessageSource(ftpInboundFileSynchronizer());
source.setLocalDirectory(new File("ftp-inbound"));
source.setAutoCreateLocalDirectory(true);
source.setLocalFilter(new FileSystemPersistentAcceptOnceFileListFilter(new SimpleMetadataStore(), ""));
return source;
}
@Bean
@ServiceActivator(inputChannel = "input")
public MessageHandler handler() {
FtpMessageHandler handler = new FtpMessageHandler(ftpOutputSessionFactory());
handler.setRemoteDirectoryExpression(new LiteralExpression(ftpConnectionProperties.getOutputDirectory()));
handler.setFileNameGenerator(message -> {
if (message.getPayload() instanceof File) {
return ((File) message.getPayload()).getName();
} else {
throw new IllegalArgumentException("File expected as payload.");
}
});
return handler;
}
}
It is handling the remote files exactly as expected, deleting the remote file from the source and putting it into the output, but it is not removing the local file after use.
I would suggest making that input channel a PublishSubscribeChannel and adding one more simple subscriber:
@Bean
public PublishSubscribeChannel input() {
return new PublishSubscribeChannel();
}
@Bean
@ServiceActivator(inputChannel = "input")
public MessageHandler handler() {
...
}
@Bean
@ServiceActivator(inputChannel = "input")
public MessageHandler deleteLocalFileService() {
return m -> ((File) m.getPayload()).delete();
}
This way the same message with the File payload is going to be sent first to your FtpMessageHandler and only after that to this new deleteLocalFileService for removing the local file based on the payload.
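For completeness, the ExpressionEvaluatingRequestHandlerAdvice mentioned in the question can also do this; a rough sketch (the bean name is illustrative, and in recent Spring Integration versions the setter is setOnSuccessExpressionString, while older ones use setOnSuccessExpression):
@Bean
public ExpressionEvaluatingRequestHandlerAdvice deleteLocalFileAdvice() {
    ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
    // the payload is still the local File, so delete it after a successful transfer
    advice.setOnSuccessExpressionString("payload.delete()");
    return advice;
}
The existing handler() bean would then reference it with @ServiceActivator(inputChannel = "input", adviceChain = "deleteLocalFileAdvice").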
A simple solution to fetch a file from an SFTP server and then move that file to another folder with a different name:
@Bean
public SessionFactory<ChannelSftp.LsEntry> sftpSessionFactory() {
DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
if (sftpServerProperties.getSftpPrivateKey() != null) {
factory.setPrivateKey(sftpServerProperties.getSftpPrivateKey());
factory.setPrivateKeyPassphrase(sftpServerProperties.getSftpPrivateKeyPassphrase());
} else {
factory.setPassword(sftpServerProperties.getPassword());
}
factory.setHost(sftpServerProperties.getSftpHost());
factory.setPort(sftpServerProperties.getSftpPort());
factory.setUser(sftpServerProperties.getSftpUser());
factory.setAllowUnknownKeys(true);
return new CachingSessionFactory<>(factory);
}
@Bean
public SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
fileSynchronizer.setDeleteRemoteFiles(false);
fileSynchronizer.setRemoteDirectory(sftpServerProperties.getSftpRemoteDirectoryDownload());
fileSynchronizer.setFilter(new SftpSimplePatternFileListFilter(sftpServerProperties.getSftpRemoteDirectoryDownloadFilter()));
return fileSynchronizer;
}
@Bean
@InboundChannelAdapter(channel = "fromSftpChannel", poller = @Poller(cron = "*/10 * * * * *"))
public MessageSource<File> sftpMessageSource() {
SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(
sftpInboundFileSynchronizer());
source.setLocalDirectory(util.createDirectory(Constants.FILES_DIRECTORY));
source.setAutoCreateLocalDirectory(true);
return source;
}
@Bean
@ServiceActivator(inputChannel = "fromSftpChannel")
public MessageHandler resultFileHandler() {
return (Message<?> message) -> {
String csvFilePath = util.getDirectory(Constants.FILES_DIRECTORY) + Constants.INSIDE + message.getHeaders().get("file_name");
util.readCSVFile(csvFilePath, String.valueOf(message.getHeaders().get("file_name")));
File file = (File) message.getPayload();
File newFile = new File(file.getPath() + System.currentTimeMillis());
try {
FileUtils.copyFile(file, newFile);
sftpGateway.sendToSftp(newFile);
} catch (Exception e) {
e.printStackTrace();
}
if (file.exists()) {
file.delete();
}
if (newFile.exists()) {
newFile.delete();
}
};
}
@Bean
@ServiceActivator(inputChannel = "toSftpChannelDest")
public MessageHandler handlerOrderBackUp() {
SftpMessageHandler handler = new SftpMessageHandler(sftpSessionFactory());
handler.setAutoCreateDirectory(true);
handler.setRemoteDirectoryExpression(new LiteralExpression(sftpServerProperties.getSftpRemoteBackupDirectory()));
return handler;
}
@MessagingGateway
public interface SFTPGateway {
@Gateway(requestChannel = "toSftpChannelDest")
void sendToSftp(File file);
}

Spurious error "Cannot correlate response - no pending reply" using TcpOutboundGateway and CachingClientConnectionFactory

I am getting spurious correlation errors using TcpOutboundGateway with CachingClientConnectionFactory in a multithreaded context.
The log message is:
2015-05-26 14:50:38.406 ERROR 3320 --- [pool-2-thread-2] o.s.i.ip.tcp.TcpOutboundGateway : Cannot correlate response - no pending reply
I do not get the error when sending from a single thread, and I have tested on 2 physical machines - Windows 7 and Fedora 20. I am using Spring Boot.
It results in a timeout error on the send that does not receive its response.
Below is my simplified code:
Note it does not always produce the error - it is spurious.
The code uses a TcpOutboundGateway and a TcpInboundGateway, but in my actual application the server is legacy (non-Spring) Java code, so I use CachingClientConnectionFactory to enhance performance.
@Configuration
@ComponentScan
@EnableAutoConfiguration
public class Test {
//**************** Client **********************************************
@Bean
public MessageChannel replyChannel() {
return new DirectChannel();
}
@Bean
public MessageChannel sendChannel() {
MessageChannel directChannel = new DirectChannel();
return directChannel;
}
@Bean
AbstractClientConnectionFactory tcpNetClientConnectionFactory() {
AbstractClientConnectionFactory tcpNetClientConnectionFactory = new TcpNetClientConnectionFactory("localhost", 9003);
CachingClientConnectionFactory cachingClientConnectionFactory = new CachingClientConnectionFactory(tcpNetClientConnectionFactory, 4);
return cachingClientConnectionFactory;
}
@Bean
@ServiceActivator(inputChannel = "sendChannel")
TcpOutboundGateway tcpOutboundGateway() {
TcpOutboundGateway tcpOutboundGateway = new TcpOutboundGateway();
tcpOutboundGateway.setConnectionFactory(tcpNetClientConnectionFactory());
tcpOutboundGateway.setReplyChannel(replyChannel());
return tcpOutboundGateway;
}
//******************************************************************
//**************** Server **********************************************
@Bean
public MessageChannel receiveChannel() {
return new DirectChannel();
}
@Bean
TcpNetServerConnectionFactory tcpNetServerConnectionFactory() {
TcpNetServerConnectionFactory tcpNetServerConnectionFactory = new TcpNetServerConnectionFactory(9003);
tcpNetServerConnectionFactory.setSingleUse(false);
return tcpNetServerConnectionFactory;
}
@Bean
TcpInboundGateway tcpInboundGateway() {
TcpInboundGateway tcpInboundGateway = new TcpInboundGateway();
tcpInboundGateway.setConnectionFactory(tcpNetServerConnectionFactory());
tcpInboundGateway.setRequestChannel(receiveChannel());
return tcpInboundGateway;
}
//******************************************************************
@Bean
@Scope("prototype")
Worker worker() {
return new Worker();
}
public volatile static int lc = 4;
public volatile static int counter = lc;
public volatile static long totStartTime = 0;
public volatile static int messageCount = 0;
public static synchronized int incMessageCount(){
return ++messageCount;
}
public static void main(String args[]) {
//new LegaServer();
ConfigurableApplicationContext applicationContext = SpringApplication.run(Test.class, args);
totStartTime = System.currentTimeMillis();
for (int z = 0; z < lc; z++) {
new Thread((Worker) applicationContext.getBean("worker")).start();
}
try {
Thread.sleep(20000);
} catch (InterruptedException e) {
e.printStackTrace();
}
applicationContext.stop();
}
}
@MessageEndpoint
class RequestHandler {
#ServiceActivator(inputChannel = "receiveChannel")
public String rxHandler(byte [] in) {
String s = new String(in);
System.out.println("rxHandler:"+s);
return "Blah blah " + s;
}
}
@MessageEndpoint
class ResponseHandler {
#ServiceActivator(inputChannel = "replyChannel")
public void replyHandler(byte [] in) {
System.out.println("replyHandler:"+new String(in));
}
}
class Worker implements Runnable {
@Autowired
@Qualifier("sendChannel")
MessageChannel dc;
@Override
public void run() {
Test.counter--;
int locMessageCount=0;
long startTime = System.currentTimeMillis();
for (int t = 0; t < 20; t++) {
locMessageCount = Test.incMessageCount();
Map hs = new HashMap<String, String>();
hs.put("context", new Integer(Test.counter));
GenericMessage message = new GenericMessage("this is a test message " + locMessageCount, hs);
try {
boolean sent = dc.send(message);
} catch (Exception e) {
//e.printStackTrace();
System.out.println("locMessageCount:"+locMessageCount);
}
}
if (locMessageCount == (Test.lc*20)) {
long totfinTime = System.currentTimeMillis();
System.out.println("Tot. Time taken: " + (totfinTime - Test.totStartTime));
System.out.println("Tot. TPS: " + (1000 * 20* Test.lc) / (totfinTime - Test.totStartTime));
System.out.println("Tot. messages: " + Test.messageCount);
}
}
}
Any suggestions would be greatly appreciated, as is the assistance I have received so far. TY
Thanks; this is a bug with the combo of the outbound gateway and caching connection factory; please open a JIRA Issue.
The problem is that the connection is added back to the pool (and reused) before the first thread (Thread-5) removes the pending reply; it ends up removing the new pending reply (for Thread-2) instead of its own.
Unfortunately, I don't have a simple work-around for you; it needs code changes in the gateway to fix it.
