spring-boot-jms RedeliveryPolicy bean ignored?

So I'm setting up Spring JMS with ActiveMQ, and I'm testing my setup a bit.
I'm trying to set up a RedeliveryPolicy so a failed message isn't retried instantly, but I noticed in my logging and on the ActiveMQ broker that it is retried immediately, without using my RedeliveryPolicy bean. Could anyone point out what I'm doing wrong? According to what I could find in the docs, this seems to be correct. (And if anyone sees why my IndividualDeadLetterStrategy is ignored and messages always end up on the general DLQ, that's welcome too.)
@Bean
public MessageConverter jacksonJmsMessageConverter() {
MappingJackson2MessageConverter converter = new MappingJackson2MessageConverter();
converter.setTargetType(MessageType.TEXT);
converter.setTypeIdPropertyName("_type");
return converter;
}
@Bean
public DeadLetterStrategy deadLetterStrategy() {
IndividualDeadLetterStrategy deadLetterStrategy = new IndividualDeadLetterStrategy();
deadLetterStrategy.setQueueSuffix(".dlq");
deadLetterStrategy.setUseQueueForQueueMessages(true);
return deadLetterStrategy;
}
@Bean
public RedeliveryPolicy redeliveryPolicy() {
RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
redeliveryPolicy.setInitialRedeliveryDelay(5000);
redeliveryPolicy.setBackOffMultiplier(2);
redeliveryPolicy.setUseExponentialBackOff(true);
redeliveryPolicy.setMaximumRedeliveries(5);
return redeliveryPolicy;
}
@Bean
public Queue myQueue() {
return new ActiveMQQueue("myQueue");
}

The deadLetterStrategy is a broker-side setting, so you have to define the broker beans as shown below.
Take a look at
http://activemq.apache.org/message-redelivery-and-dlq-handling.html
import java.util.ArrayList;
import java.util.List;
import javax.jms.ConnectionFactory;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.RedeliveryPolicy;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.policy.DeadLetterStrategy;
import org.apache.activemq.broker.region.policy.IndividualDeadLetterStrategy;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.stereotype.Component;
@SpringBootApplication
@Configuration
@ComponentScan(excludeFilters = @ComponentScan.Filter(type = FilterType.ANNOTATION, classes = { Configuration.class, Component.class }))
public class ActiveMQConfigurationDeadLetterStrategy {
public static void main(String[] args) {
ConfigurableApplicationContext app = SpringApplication.run(ActiveMQConfigurationDeadLetterStrategy.class, args);
}
@Bean
public BrokerService broker() throws Exception {
final BrokerService broker = new BrokerService();
broker.addConnector("tcp://localhost:61616");
broker.addConnector("vm://localhost");
broker.setPersistent(false);
broker.setDestinationPolicy(policyMap());
return broker;
}
@Bean
public PolicyMap policyMap() {
PolicyMap destinationPolicy = new PolicyMap();
List<PolicyEntry> entries = new ArrayList<PolicyEntry>();
PolicyEntry queueEntry = new PolicyEntry();
queueEntry.setQueue(">");
queueEntry.setDeadLetterStrategy(deadLetterStrategy());
PolicyEntry topicEntry = new PolicyEntry();
topicEntry.setTopic(">");
topicEntry.setDeadLetterStrategy(deadLetterStrategy());
entries.add(queueEntry);
entries.add(topicEntry);
destinationPolicy.setPolicyEntries(entries);
return destinationPolicy;
}
@Bean
public DeadLetterStrategy deadLetterStrategy() {
IndividualDeadLetterStrategy deadLetterStrategy = new IndividualDeadLetterStrategy();
deadLetterStrategy.setQueueSuffix(".dlq");
deadLetterStrategy.setUseQueueForQueueMessages(true);
return deadLetterStrategy;
}
@Bean
public RedeliveryPolicy redeliveryPolicy() {
RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
redeliveryPolicy.setInitialRedeliveryDelay(5000);
redeliveryPolicy.setBackOffMultiplier(2);
redeliveryPolicy.setUseExponentialBackOff(true);
redeliveryPolicy.setMaximumRedeliveries(5);
return redeliveryPolicy;
}
@Bean
public ConnectionFactory jmsConnectionFactory() {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory();
connectionFactory.setRedeliveryPolicy(redeliveryPolicy());
return connectionFactory;
}
}
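To see the policy in action, a listener that simply throws should be redelivered with the configured backoff and, after the fifth failed attempt, end up on myQueue.dlq. A minimal sketch, assuming Spring Boot's default transacted listener container; this listener class is an illustration and not part of the original setup:
import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;
@Component
public class MyQueueListener {
    @JmsListener(destination = "myQueue")
    public void onMessage(String payload) {
        // Throwing rolls back the transacted session, so the broker redelivers
        // the message per the RedeliveryPolicy and moves it to myQueue.dlq once
        // maximumRedeliveries is exhausted.
        throw new IllegalStateException("Simulated failure for " + payload);
    }
}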
UPDATE
If you use an external ActiveMQ broker, the deadLetterStrategy can only be set in the destination policy map of the activemq.xml configuration file.
For example:
<broker>
<destinationPolicy>
<policyMap>
<policyEntries>
<!-- Set the following policy on all queues using the '>' wildcard -->
<policyEntry queue=">">
<deadLetterStrategy>
<!--
Use the prefix 'DLQ.' for the destination name, and make
the DLQ a queue rather than a topic
-->
<individualDeadLetterStrategy queuePrefix="DLQ." useQueueForQueueMessages="true"/>
</deadLetterStrategy>
</policyEntry>
</policyEntries>
</policyMap>
</destinationPolicy>
</broker>
import java.util.ArrayList;
import java.util.List;
import javax.jms.ConnectionFactory;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.RedeliveryPolicy;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.policy.DeadLetterStrategy;
import org.apache.activemq.broker.region.policy.IndividualDeadLetterStrategy;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.stereotype.Component;
@SpringBootApplication
@Configuration
@ComponentScan(excludeFilters = @ComponentScan.Filter(type = FilterType.ANNOTATION, classes = { Configuration.class, Component.class }))
public class ActiveMQConfigurationDeadLetterStrategy {
public static void main(String[] args) {
ConfigurableApplicationContext app = SpringApplication.run(ActiveMQConfigurationDeadLetterStrategy.class, args);
}
@Bean
public RedeliveryPolicy redeliveryPolicy() {
RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
redeliveryPolicy.setInitialRedeliveryDelay(5000);
redeliveryPolicy.setBackOffMultiplier(2);
redeliveryPolicy.setUseExponentialBackOff(true);
redeliveryPolicy.setMaximumRedeliveries(5);
return redeliveryPolicy;
}
@Bean
public ConnectionFactory jmsConnectionFactory() {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("tcp://localhost:61616");
connectionFactory.setRedeliveryPolicy(redeliveryPolicy());
return connectionFactory;
}
}
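As an alternative to replacing the connection factory bean, recent Spring Boot versions expose an ActiveMQConnectionFactoryCustomizer that attaches the policy to the auto-configured factory built from the spring.activemq.* properties. A sketch, assuming such a Boot version is on the classpath:
import org.apache.activemq.RedeliveryPolicy;
import org.springframework.boot.autoconfigure.jms.activemq.ActiveMQConnectionFactoryCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class RedeliveryConfig {
    // Customizes Boot's auto-configured ActiveMQConnectionFactory instead of
    // defining a separate ConnectionFactory bean.
    @Bean
    public ActiveMQConnectionFactoryCustomizer redeliveryCustomizer() {
        return factory -> {
            RedeliveryPolicy policy = new RedeliveryPolicy();
            policy.setInitialRedeliveryDelay(5000);
            policy.setBackOffMultiplier(2);
            policy.setUseExponentialBackOff(true);
            policy.setMaximumRedeliveries(5);
            factory.setRedeliveryPolicy(policy);
        };
    }
}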

Related

Trigger quartz job at certain time Java spring boot

I have an issue: when I launch the application, the Quartz job in my Java Spring Boot project is triggered immediately, even though it should only fire at the configured time, in this case 12:00 every day. I tried using the SimpleTriggerFactoryBean class because I had an issue with the plain SimpleTrigger class while configuring the job.
My QuartzConfig class
package com.example.demo.config;
import com.example.demo.jobs.CurrencyImportJob;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.JobDetailFactoryBean;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SimpleTriggerFactoryBean;
import org.springframework.scheduling.quartz.SpringBeanJobFactory;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
@Slf4j
@Configuration
public class QuartzConfig {
final ApplicationContext applicationContext;
public QuartzConfig(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
@Bean
SpringBeanJobFactory createSpringBeanJobFactory() {
return new SpringBeanJobFactory() {
@Override
protected Object createJobInstance(final TriggerFiredBundle bundle) throws Exception {
final Object job = super.createJobInstance(bundle);
applicationContext
.getAutowireCapableBeanFactory()
.autowireBean(job);
return job;
}
};
}
@Bean
public SchedulerFactoryBean createSchedulerFactory(SpringBeanJobFactory springBeanJobFactory, Trigger trigger) {
SchedulerFactoryBean schedulerFactory = new SchedulerFactoryBean();
schedulerFactory.setAutoStartup(true);
schedulerFactory.setWaitForJobsToCompleteOnShutdown(true);
schedulerFactory.setTriggers(trigger);
springBeanJobFactory.setApplicationContext(applicationContext);
schedulerFactory.setJobFactory(springBeanJobFactory);
return schedulerFactory;
}
@Bean
public SimpleTriggerFactoryBean createSimpleTriggerFactoryBean(JobDetail jobDetail) {
LocalDateTime now = LocalDateTime.now().withHour(12).withMinute(0);
Date date = Date.from(now.atZone(ZoneId.systemDefault()).toInstant());
SimpleTriggerFactoryBean simpleTriggerFactory = new SimpleTriggerFactoryBean();
simpleTriggerFactory.setStartTime(date);
simpleTriggerFactory.setJobDetail(jobDetail);
simpleTriggerFactory.setStartDelay(0);
simpleTriggerFactory.setRepeatInterval(1000);
simpleTriggerFactory.setRepeatCount(0);
return simpleTriggerFactory;
}
@Bean
public JobDetailFactoryBean createJobDetailFactoryBean() {
JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
jobDetailFactory.setJobClass(CurrencyImportJob.class);
return jobDetailFactory;
}
}
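A likely cause is the trigger itself: the start time computed from LocalDateTime.now().withHour(12).withMinute(0) is already in the past whenever the application starts after noon, so Quartz misfires and fires immediately, and with setRepeatCount(0) the job never fires again. For a fixed daily time, a cron trigger is the usual choice; a minimal sketch replacing createSimpleTriggerFactoryBean (the bean and class names are illustrative):
import org.quartz.JobDetail;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.CronTriggerFactoryBean;
@Configuration
public class DailyTriggerConfig {
    // Fires the job every day at 12:00 instead of relying on a SimpleTrigger
    // start date, which misfires when the computed start time has already passed.
    @Bean
    public CronTriggerFactoryBean createDailyCronTrigger(JobDetail jobDetail) {
        CronTriggerFactoryBean trigger = new CronTriggerFactoryBean();
        trigger.setJobDetail(jobDetail);
        trigger.setCronExpression("0 0 12 * * ?");
        return trigger;
    }
}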

Failed to configure a DataSource: 'url' attribute is not specified and no embedded datasource could be configured when using custom datasource format

When I start my spring boot application, it shows this error:
Failed to configure a DataSource: 'url' attribute is not specified and no embedded datasource could be configured.
This is my datasource configuration:
spring.datasource.type = com.alibaba.druid.pool.DruidDataSource
## master
spring.datasource.druid.illidan.master.name = primary_db
spring.datasource.druid.illidan.master.url = jdbc:mysql://127.0.0.1:3309/report-fat?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&useSSL=false&verifyServerCertificate=false
spring.datasource.druid.illidan.master.username = root
spring.datasource.druid.illidan.master.password = LogSsDk87Fm2RXSVHMnwjvtA5Nncs2kT7nWrRkB06BcBNPwyTnaf60jNG+ENeyuhYm+X1fWj59XHFiXqiw==
spring.datasource.druid.illidan.master.connection-properties = config.decrypt=true;config.decrypt.key=MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKwORT+pNusNtC433Kdmk18WTYbxu0gGAWBccQrK+h0PHElZrEsJDqj9yvNq2Rwzw6d/YwECio3bS+yMCAwEAAQ==
spring.datasource.druid.illidan.master.driver-class-name = com.mysql.cj.jdbc.Driver
#
# slave
spring.datasource.druid.illidan.slave.name = primary_db
spring.datasource.druid.illidan.slave.url = jdbc:mysql://127.0.0.1:3309/report-fat?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&useSSL=false&verifyServerCertificate=false
spring.datasource.druid.illidan.slave.username = root
spring.datasource.druid.illidan.slave.password = LogSsDk87Fm2RXSVHMnwjvtA5Nncs2kRT7nWrRkB06BcBNPwyTnbd+af60jNG+ENeyuhYm+X1fWj59XHFiXqiw==
spring.datasource.druid.illidan.slave.connection-properties = config.decrypt=true;config.decrypt.key=MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKwORT+pNusNtC433Kdmk18WTYbxu0gGAWBccQrK+h0PHElZrEsJDqj9yvNq2Rwzwx9PoWRn6d/YwECio3bS+yMCAwEAAQ==
spring.datasource.druid.illidan.slave.driver-class-name = com.mysql.cj.jdbc.Driver
and this is my datasource config:
package com.sportswin.soa.illidan.config;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import com.sportswin.soa.misc.config.db.ReadWriteSplitRoutingDataSource;
import com.sportswin.soa.misc.constant.db.ConstantPool;
import com.sportswin.soa.misc.interceptor.db.DefaultTimeInterceptor;
import com.sportswin.soa.misc.spring.aspect.db.UseSlaveAspect;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author dolphin
*/
@Configuration
@MapperScan(basePackages = {"com.sportswin.soa.illidan.dao"}, sqlSessionTemplateRef = "sqlSessionTemplate")
public class DataSourceConfig {
/**
* Database connection pool type.
*/
@Value("${spring.datasource.type}")
private Class<? extends DataSource> dataSourceType;
@Bean(name = "dataSource")
@Qualifier(value = "dataSource")
@Primary
@DependsOn({ "masterDataSource", "slaveDataSource" })
public DataSource dataSource(@Qualifier("masterDataSource") DataSource masterDataSource,
@Qualifier("slaveDataSource") DataSource slaveDataSource) {
System.out.println(masterDataSource.toString());
System.out.println(slaveDataSource.toString());
ReadWriteSplitRoutingDataSource writeSplitRoutingDataSource = new ReadWriteSplitRoutingDataSource();
Map<Object, Object> targetDataSources = new HashMap<Object, Object>();
targetDataSources.put(ConstantPool.MASTER_KEY, masterDataSource);
targetDataSources.put(ConstantPool.SLAVE_KEY, slaveDataSource);
writeSplitRoutingDataSource.setTargetDataSources(targetDataSources);
writeSplitRoutingDataSource.setDefaultTargetDataSource(masterDataSource);
List<String> slaveDataSourceNames = new ArrayList<String>();
slaveDataSourceNames.add(ConstantPool.SLAVE_KEY);
UseSlaveAspect.setSlaveDataSourceNames(slaveDataSourceNames);
return writeSplitRoutingDataSource;
}
/**
* Master (write) database.
* @return the master data source
*/
@Bean(name = "masterDataSource", destroyMethod = "close", initMethod = "init")
@ConfigurationProperties(prefix = "spring.datasource.druid.illidan.master")
public DataSource masterDataSource() {
System.out.println("master datasource");
return DruidDataSourceBuilder.create().build();
}
/**
* Slave database 1 (read-only).
* @return the slave data source
*/
@Bean(name = "slaveDataSource", destroyMethod = "close", initMethod = "init")
@ConfigurationProperties(prefix = "spring.datasource.druid.illidan.slave")
public DataSource slaveDataSource() {
System.out.println("slave datasource");
return DruidDataSourceBuilder.create().build();
}
@Autowired
private PaginationInterceptor paginationInterceptor;
@Autowired
private DefaultTimeInterceptor defaultTimeInterceptor;
@Bean(name = "sqlSessionFactory")
public SqlSessionFactory sqlSessionFactory(@Qualifier("dataSource") DataSource dataSource) throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dataSource);
bean.setConfigLocation(new PathMatchingResourcePatternResolver().getResource("classpath:mybatis/mybatis-config.xml"));
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath:mybatis/mapper/*/*.xml"));
Interceptor[] plugins = {paginationInterceptor,defaultTimeInterceptor};
bean.setPlugins(plugins);
return bean.getObject();
}
@Bean(name = "transactionManager")
public DataSourceTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
return new DataSourceTransactionManager(dataSource);
}
@Bean(name = "sqlSessionTemplate")
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
}
Why would this happen, and what should I do to fix it?
Can you try moving your datasource type inside each datasource?
Remove it from here: spring.datasource.type = com.alibaba.druid.pool.DruidDataSource
## master
spring.datasource.druid.illidan.master.type = com.alibaba.druid.pool.DruidDataSource
spring.datasource.druid.illidan.master.name = primary_db
spring.datasource.druid.illidan.master.url = jdbc:mysql://127.0.0.1:3309/report-fat?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&useSSL=false&verifyServerCertificate=false
spring.datasource.druid.illidan.master.username = root
spring.datasource.druid.illidan.master.password = LogSsDk87Fm2RXSVHMnwjvtA5Nncs2kT7nWrRkB06BcBNPwyTnaf60jNG+ENeyuhYm+X1fWj59XHFiXqiw==
spring.datasource.druid.illidan.master.connection-properties = config.decrypt=true;config.decrypt.key=MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKwORT+pNusNtC433Kdmk18WTYbxu0gGAWBccQrK+h0PHElZrEsJDqj9yvNq2Rwzw6d/YwECio3bS+yMCAwEAAQ==
spring.datasource.druid.illidan.master.driver-class-name = com.mysql.cj.jdbc.Driver
#
# slave
spring.datasource.druid.illidan.slave.type = com.alibaba.druid.pool.DruidDataSource
spring.datasource.druid.illidan.slave.name = primary_db
spring.datasource.druid.illidan.slave.url = jdbc:mysql://127.0.0.1:3309/report-fat?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&transformedBitIsBoolean=true&useSSL=false&verifyServerCertificate=false
spring.datasource.druid.illidan.slave.username = root
spring.datasource.druid.illidan.slave.password = LogSsDk87Fm2RXSVHMnwjvtA5Nncs2kRT7nWrRkB06BcBNPwyTnbd+af60jNG+ENeyuhYm+X1fWj59XHFiXqiw==
spring.datasource.druid.illidan.slave.connection-properties = config.decrypt=true;config.decrypt.key=MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAKwORT+pNusNtC433Kdmk18WTYbxu0gGAWBccQrK+h0PHElZrEsJDqj9yvNq2Rwzwx9PoWRn6d/YwECio3bS+yMCAwEAAQ==
spring.datasource.druid.illidan.slave.driver-class-name = com.mysql.cj.jdbc.Driver
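If moving the type does not help, the error usually means DataSourceAutoConfiguration is still trying to build a default DataSource from spring.datasource.*, which has no url here. Excluding that auto-configuration is another common fix; a sketch, with the application class name assumed:
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
// Skips the auto-configuration that fails without spring.datasource.url; the
// Druid data sources in DataSourceConfig still bind to their own prefixes
// through @ConfigurationProperties.
@SpringBootApplication(exclude = DataSourceAutoConfiguration.class)
public class ReportApplication {
    public static void main(String[] args) {
        SpringApplication.run(ReportApplication.class, args);
    }
}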

Spring Batch java config error using ClassifierCompositeItemWriter

I'm using Spring Batch with Java configuration (new to this) and I'm running into an error when trying to use a ClassifierCompositeItemWriter to generate separate files based on a classifier.
The error I'm getting is org.springframework.batch.item.WriterNotOpenException: Writer must be open before it can be written to.
My configuration looks as follows:
package com.infonova.btcompute.batch.geneva.job;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusFinishedJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusInprogressJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTransferStatus;
import com.infonova.btcompute.batch.geneva.camel.GenevaJobLauncher;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.geneva.dto.GenveaDetailsTransactionDto;
import com.infonova.btcompute.batch.geneva.properties.GenevaDetailsExportJobProperties;
import com.infonova.btcompute.batch.geneva.rowmapper.GenevaDetailsTransactionsRowMapper;
import com.infonova.btcompute.batch.geneva.steps.*;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import com.infonova.product.batch.camel.CamelEnabledJob;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.support.ClassifierCompositeItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.classify.BackToBackPatternClassifier;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public abstract class AbstractGenevaDetailsExportJob extends CamelEnabledJob {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGenevaDetailsExportJob.class);
@Autowired
protected JobBuilderFactory jobBuilders;
@Autowired
protected StepBuilderFactory stepBuilders;
@Autowired
protected DataSource datasource;
@Autowired
private BillrunTransferStatusMapper billrunTransferStatusMapper;
@Autowired
protected JdbcTemplate jdbcTemplate;
public abstract GenevaDetailsExportJobProperties jobProperties();
@Bean
public RouteBuilder routeBuilder(final GenevaDetailsExportJobProperties jobProperties, final Job job) {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from(jobProperties.getConsumer())
.transacted("PROPAGATION_REQUIRED")
.routeId(jobProperties.getInputRouteName())
.process(genevaJobLauncher(job));
//.to("ftp://app#127.0.0.1?password=secret");
}
};
}
@Bean
public Processor genevaJobLauncher(Job job) {
return new GenevaJobLauncher(job);
}
@Bean
@StepScope
public GenevaDetailsReader reader() {
GenevaDetailsReader reader = new GenevaDetailsReader(jobProperties().getMandatorKey(),
jobProperties().getInvoiceType(), jobProperties().getSqlResourcePath());
reader.setSql("");
reader.setDataSource(datasource);
reader.setRowMapper(new GenevaDetailsTransactionsRowMapper());
reader.setFetchSize(jobProperties().getFetchSize());
return reader;
}
@Bean
@StepScope
public GenevaDetailsItemProcessor processor() {
return new GenevaDetailsItemProcessor();
}
@Bean
@StepScope
public ClassifierCompositeItemWriter writer() {
List<String> serviceCodes = new ArrayList<>();//billrunTransferStatusMapper.getServiceCodes(jobProperties().getMandatorKey());
Long billingTaskId = billrunTransferStatusMapper.getCurrentTaskId(jobProperties().getMandatorKey());
String countryKey = billrunTransferStatusMapper.getCountryKey(billingTaskId);
serviceCodes.add("BTCC");
serviceCodes.add("CCMS");
BackToBackPatternClassifier classifier = new BackToBackPatternClassifier();
classifier.setRouterDelegate(new GenveaDetailsRouterClassifier());
HashMap<String, Object> map = new HashMap<>();
for (String serviceCode : serviceCodes) {
map.put(serviceCode, genevaDetailsWriter(serviceCode, countryKey));
}
classifier.setMatcherMap(map);
ClassifierCompositeItemWriter<GenveaDetailsTransactionDto> writer = new ClassifierCompositeItemWriter<>();
writer.setClassifier(classifier);
return writer;
}
@Bean
@StepScope
public GenevaDetailsFlatFileItemWriter genevaDetailsWriter(String serviceCode, String countryKey) {
GenevaDetailsFlatFileItemWriter writer = new GenevaDetailsFlatFileItemWriter(jobProperties().getDelimiter());
FileNameGeneration fileNameGeneration = new FileNameGeneration();
try {
FileSystemResource fileSystemResource = new FileSystemResource(new File(jobProperties().getExportDir(), fileNameGeneration.generateFileName(jdbcTemplate,
serviceCode, countryKey)));
writer.setResource(fileSystemResource);
} catch (SQLException e) {
LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
}
return writer;
}
@Bean
public Job job() {
return jobBuilders.get(jobProperties().getJobName())
.start(setBillRunTransferStatusDetailInprogressStep())
.next(processGenevaDetailsStep())
.next(setBillRunTransferStatusProcessedStep())
.build();
}
@Bean
public Step setBillRunTransferStatusDetailInprogressStep() {
return stepBuilders.get("setBillRunTransferStatusDetailInprogressStep")
.tasklet(setBillRunTransferStatusDetailInprogress())
.build();
}
@Bean
public Tasklet setBillRunTransferStatusDetailInprogress() {
return new BillRunTranStatusInprogressJobAssignment(BillRunTransferStatus.SUMMARY.toString(), BillRunTransferStatus.DETAILS_INPROGRESS.toString(),
jobProperties().getMandatorKey(), jobProperties().getInvoiceTypeNum(), jobProperties().getReportTypeNum());
}
@Bean
public Step setBillRunTransferStatusProcessedStep() {
return stepBuilders.get("setBillRunTransferStatusProcessedStep")
.tasklet(setBillRunTransferStatusProcessed())
.build();
}
@Bean
public Tasklet setBillRunTransferStatusProcessed() {
return new BillRunTranStatusFinishedJobAssignment(BillRunTransferStatus.PROCESSED.toString());
}
@Bean
public Step processGenevaDetailsStep() {
return stepBuilders.get("processGenevaDetailsStep")
.<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
.reader(reader())
.processor(processor())
.writer(writer())
.build();
}
}
and my writer looks like:
package com.infonova.btcompute.batch.geneva.steps;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.*;
import org.springframework.batch.item.file.FlatFileHeaderCallback;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
@Component
public class GenevaDetailsFlatFileItemWriter extends FlatFileItemWriter<GenevaDetailsResultsDto> {
private static final Logger LOGGER = LoggerFactory.getLogger(GenevaDetailsFlatFileItemWriter.class);
@Autowired
protected JdbcTemplate jdbcTemplate;
@Autowired
private BillrunTransferStatusMapper billrunTransferStatusMapper;
private String delimiter;
public GenevaDetailsFlatFileItemWriter(String delimiter) {
this.delimiter = delimiter;
this.setLineAggregator(getLineAggregator());
this.setHeaderCallback(getHeaderCallback());
}
private DelimitedLineAggregator<GenevaDetailsResultsDto> getLineAggregator() {
DelimitedLineAggregator<GenevaDetailsResultsDto> delLineAgg = new DelimitedLineAggregator<>();
delLineAgg.setDelimiter(delimiter);
BeanWrapperFieldExtractor<GenevaDetailsResultsDto> fieldExtractor = new BeanWrapperFieldExtractor<>();
fieldExtractor.setNames(getNames());
delLineAgg.setFieldExtractor(fieldExtractor);
return delLineAgg;
}
private String[] getHeaderNames() {
return new String[] {"Record ID", "Service Identifier", "Billing Account Reference", "Cost Description", "Event Cost",
"Event Date and Time", "Currency Code", "Charge Category", "Order Identifier", "Net Usage", "UOM",
"Quantity", "Service Start Date", "Service End Date"};
}
private String[] getNames() {
return new String[] {"RECORD_ID", "SERVICE_CODE", "BILLING_ACCOUNT_REFERENCE", "COST_DESCRIPTION", "EVENT_COST",
"EVENT_DATE_AND_TIME", "CURRENCY_CODE", "CHARGE_CATEGORY", "ORDER_IDENTIFIER", "NET_USAGE", "UOM",
"QUANTITY", "SERVICE_START_DATE", "SERVICE_END_DATE"};
}
private FlatFileHeaderCallback getHeaderCallback()
{
return new FlatFileHeaderCallback() {
@Override
public void writeHeader(Writer writer) throws IOException {
writer.write(String.join(delimiter, getHeaderNames()));
}
};
}
// @BeforeStep
// public void beforeStep(StepExecution stepExecution) {
// billingTaskId = (Long) stepExecution.getJobExecution().getExecutionContext().get("billingTaskId");
// FileNameGeneration fileNameGeneration = new FileNameGeneration();
//
// try {
// FileSystemResource fileSystemResource = new FileSystemResource(new File(exportDir, fileNameGeneration.generateFileName(jdbcTemplate,
// serviceCode, billrunTransferStatusMapper.getCountryKey(billingTaskId))));
// setResource(fileSystemResource);
// } catch (SQLException e) {
// LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
// }
// }
}
I have searched the web and cannot find a solution to this issue.
What @Hansjoerg Wingeier wrote about ClassifierCompositeItemWriter is correct, but the right way to resolve the problem is to register the delegate writer(s) as stream(s) using AbstractTaskletStepBuilder.stream(), so that Spring Batch manages the execution context lifecycle for them.
ClassifierCompositeItemWriter does not implement the ItemStream interface, hence the open method of your FlatFileItemWriter is never called.
The easiest thing to do is to call the open method when you create your classifier map:
for (String serviceCode : serviceCodes) {
FlatFileItemWriter writer = genevaDetailsWriter(serviceCode, countryKey);
writer.open(new ExecutionContext());
map.put(serviceCode, writer);
}
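For reference, the stream registration mentioned above would look roughly like this in processGenevaDetailsStep(); a sketch against the configuration in the question, using the two service codes hard-coded in writer():
@Bean
public Step processGenevaDetailsStep() {
    // Resolve countryKey the same way writer() does; inlined here only to keep
    // the sketch short.
    Long billingTaskId = billrunTransferStatusMapper.getCurrentTaskId(jobProperties().getMandatorKey());
    String countryKey = billrunTransferStatusMapper.getCountryKey(billingTaskId);
    return stepBuilders.get("processGenevaDetailsStep")
            .<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
            .reader(reader())
            .processor(processor())
            .writer(writer())
            // Registering the delegates as streams lets Spring Batch call their
            // open/update/close with the step's ExecutionContext.
            .stream(genevaDetailsWriter("BTCC", countryKey))
            .stream(genevaDetailsWriter("CCMS", countryKey))
            .build();
}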

Can't Access Atmosphere MetaBroadcaster in Spring Boot Controller

I have a simple chat application set up using Spring Boot (1.3.2) and Atmosphere (2.4.2).
Here is my WebSocketConfigurer:
package com.chat.shared.websocket;
import java.util.Collections;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
import org.apache.catalina.Context;
import org.apache.tomcat.websocket.server.WsSci;
import org.atmosphere.cpr.AtmosphereFramework;
import org.atmosphere.cpr.AtmosphereServlet;
import org.atmosphere.cpr.MetaBroadcaster;
import org.springframework.boot.context.embedded.ServletContextInitializer;
import org.springframework.boot.context.embedded.tomcat.TomcatContextCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.chat.privatechat.ChatChannel;
@Configuration
public class WebSocketConfigurer implements ServletContextInitializer {
@Bean
public TomcatEmbeddedServletContainerFactory tomcatContainerFactory() {
TomcatEmbeddedServletContainerFactory factory = new TomcatEmbeddedServletContainerFactory();
factory.setTomcatContextCustomizers(Collections.singletonList(tomcatContextCustomizer()));
return factory;
}
@Bean
public TomcatContextCustomizer tomcatContextCustomizer() {
return new TomcatContextCustomizer() {
@Override
public void customize(Context context) {
context.addServletContainerInitializer(new WsSci(), null);
}
};
}
@Bean
public AtmosphereServlet atmosphereServlet() {
return new AtmosphereServlet();
}
@Bean
public AtmosphereFramework atmosphereFramework() {
return atmosphereServlet().framework();
}
@Bean
public MetaBroadcaster metaBroadcaster() {
AtmosphereFramework framework = atmosphereFramework();
return framework.metaBroadcaster();
}
@Override
public void onStartup(ServletContext servletContext) throws ServletException {
configureAtmosphere(atmosphereServlet(), servletContext);
}
private void configureAtmosphere(AtmosphereServlet servlet, ServletContext servletContext) {
ServletRegistration.Dynamic reg = servletContext.addServlet("atmosphereServlet", servlet);
reg.setInitParameter("org.atmosphere.cpr.packages", ChatChannel.class.getPackage().getName());
reg.setInitParameter("org.atmosphere.cpr.broadcasterClass", "org.atmosphere.plugin.hazelcast.HazelcastBroadcaster");
reg.setInitParameter("org.atmosphere.cpr.broadcaster.maxProcessingThreads", "10");
reg.setInitParameter("org.atmosphere.cpr.broadcaster.maxAsyncWriteThreads", "10");
reg.setInitParameter("org.atmosphere.interceptor.HeartbeatInterceptor.clientHeartbeatFrequencyInSeconds", "10");
servletContext.addListener(new org.atmosphere.cpr.SessionSupport());
reg.addMapping("/chat/*");
reg.setLoadOnStartup(0);
reg.setAsyncSupported(true);
}
}
And here is how I'm currently leveraging it in the ChatChannel:
package com.chat.privatechat;
import com.chat.privatechat.DTOs.ChatMessageDTO;
import com.chat.shared.localmessagebus.LocalMessage;
import com.chat.shared.localmessagebus.LocalMessageBus;
import org.atmosphere.config.service.Disconnect;
import org.atmosphere.config.service.Get;
import org.atmosphere.config.service.ManagedService;
import org.atmosphere.config.service.PathParam;
import org.atmosphere.config.service.Ready;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceFactory;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.MetaBroadcaster;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.inject.Inject;
@ManagedService(path = "/chat/{channel: [a-zA-Z][a-zA-Z_0-9]*}")
public class ChatChannel {
@PathParam("channel")
private String channelUuid;
@Inject
private BroadcasterFactory factory;
@Inject
private AtmosphereResourceFactory resourceFactory;
@Inject
private MetaBroadcaster metaBroadcaster;
@Get
public void init(AtmosphereResource resource) {
resource.getResponse().setCharacterEncoding(StandardCharsets.UTF_8.name());
}
@Ready
public void onReady(final AtmosphereResource resource) {
String userId = resource.getRequest().getHeader("userId");
System.out.println("User " + userId + " has connected.");
}
@Disconnect
public void onDisconnect(AtmosphereResourceEvent event) {
String userId = event.getResource().getRequest().getHeader("userId");
System.out.println("User " + userId + " has disconnected");
}
@org.atmosphere.config.service.Message(encoders = MessageEncoderDecoder.class, decoders = MessageEncoderDecoder.class)
public ChatMessageDTO onMessage(ChatMessageDTO chatMessage) throws IOException {
LocalMessageBus.manager().send(new LocalMessage<ChatMessageDTO>(chatMessage));
return chatMessage;
}
}
This setup works great (users in a conversation are connected to a "channel" and the messages are sent/received immediately. LocalMessageBus is a simple message bus that will eventually be replaced by a proper message broker).
Although I don't have a use case for this yet, I went to set up a MetaBroadcaster in my ChatController to see if I could broadcast messages from there. Unfortunately, I am not able to properly inject or reference the MetaBroadcaster, as it is always null. Here are the important bits of the ChatController:
package com.chat.privatechat;
import java.util.List;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import com.chat.privatechat.DTOs.ChatMessageDTO;
import com.chat.privatechat.DTOs.ChatSessionInitializationDTO;
import com.chat.privatechat.DTOs.EstablishedChatSessionDTO;
import com.chat.shared.http.JSONResponseHelper;
import com.chat.user.UserService;
import com.chat.user.exceptions.IsSameUserException;
import com.chat.user.exceptions.UserNotFoundException;
import com.chat.user.strategies.UserRetrievalByIdStrategy;
@Controller
public class ChatController {
@Autowired
private ChatService chatService;
@Autowired
private BeanFactory beanFactory;
@Autowired
private UserService userService;
@Inject
private MetaBroadcaster metaBroadcaster;
@RequestMapping(value="/api/chat/session", method=RequestMethod.PUT, produces="application/json", consumes="application/json")
public ResponseEntity<String> establishChatSession(@RequestBody ChatSessionInitializationDTO initialChatSession) throws IsSameUserException, BeansException, UserNotFoundException {
...
}
@RequestMapping(value="/api/chat/session/{channelUuid}", method=RequestMethod.GET, produces="application/json")
public ResponseEntity<String> getExistingChatSessionMessages(@PathVariable("channelUuid") String channelUuid) {
...
}
}
Neither injecting/autowiring the MetaBroadcaster nor pulling the metaBroadcaster bean from the BeanFactory works. I've searched and searched without finding a good solution. It seems the bean is not accessible in this Spring controller context, and I'm running out of ideas.
Thank you for any input!
NOTE: These are the Atmosphere deps I have:
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-runtime</artifactId>
<version>2.4.2</version>
</dependency>
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-spring</artifactId>
<version>2.4.2</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>atmosphere-javascript</artifactId>
<version>2.2.3</version>
</dependency>
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-hazelcast</artifactId>
<version>2.4.2</version>
</dependency>
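One thing that might explain the null: the metaBroadcaster() bean is resolved from the AtmosphereFramework before the servlet has actually initialized it. A rough, unverified sketch of a lazy lookup that only reuses the APIs already shown above:
import org.atmosphere.cpr.AtmosphereFramework;
import org.atmosphere.cpr.MetaBroadcaster;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
@Controller
public class ChatController {
    // Inject the framework itself and resolve the MetaBroadcaster per call,
    // after Atmosphere has started, instead of capturing it at wiring time.
    @Autowired
    private AtmosphereFramework atmosphereFramework;
    private MetaBroadcaster metaBroadcaster() {
        return atmosphereFramework.metaBroadcaster();
    }
}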

Spring, Gradle compiling error with org.springframework.boot.autoconfigure

I want to write my own IM using Spring Framework.
I read this one:
Getting Started - Messaging with RabbitMQ
When I compile this code below I get this error message:
http://pastebin.com/7gNJBAE2
I will be grateful for any help or advice.
I'm using:
Windows 8.1,
Java 8,
NetBeans 8.0.2.,
Gradle 2.5
Receiver.java
package hello;
import java.util.concurrent.CountDownLatch;
public class Receiver {
private CountDownLatch latch = new CountDownLatch(1);
public void receiveMessage(String message) {
System.out.println("Received <" + message + ">");
latch.countDown();
}
public CountDownLatch getLatch() {
return latch;
}
}
Application.java
package hello;
import java.util.concurrent.TimeUnit;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer;
import org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
@SpringBootApplication
public class Application implements CommandLineRunner {
final static String queueName = "spring-boot";
@Autowired
AnnotationConfigApplicationContext context;
@Autowired
RabbitTemplate rabbitTemplate;
@Bean
Queue queue() {
return new Queue(queueName, false);
}
@Bean
TopicExchange exchange() {
return new TopicExchange("spring-boot-exchange");
}
@Bean
Binding binding(Queue queue, TopicExchange exchange) {
return BindingBuilder.bind(queue).to(exchange).with(queueName);
}
@Bean
SimpleMessageListenerContainer container(ConnectionFactory connectionFactory, MessageListenerAdapter listenerAdapter) {
SimpleMessageListenerContainer container = new SimpleMessageListenerContainer();
container.setConnectionFactory(connectionFactory);
container.setQueueNames(queueName);
container.setMessageListener(listenerAdapter);
return container;
}
@Bean
Receiver receiver() {
return new Receiver();
}
@Bean
MessageListenerAdapter listenerAdapter(Receiver receiver) {
return new MessageListenerAdapter(receiver, "receiveMessage");
}
public static void main(String[] args) throws InterruptedException {
SpringApplication.run(Application.class, args);
}
@Override
public void run(String... args) throws Exception {
System.out.println("Waiting five seconds...");
Thread.sleep(5000);
System.out.println("Sending message...");
rabbitTemplate.convertAndSend(queueName, "Hello from RabbitMQ!");
receiver().getLatch().await(10000, TimeUnit.MILLISECONDS);
context.close();
}
}
java.lang.IllegalStateException: Could not evaluate condition on org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration#transactionManager due to internal class not found. This can happen if you are @ComponentScanning a springframework package (e.g. if you put a @ComponentScan in the default package by mistake)
It seems that your source file sits directly in src/main/java instead of in src/main/java/hello, which effectively puts it in the default package.
Didn't you add the package hello; declaration manually? I copy-pasted that code into a fresh project created from start.spring.io and it worked without touching anything.
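In other words, the layout both comments point at is: the package declaration and the source directory have to match, and the classes must not sit directly in src/main/java (the default package), for example:
src/main/java/hello/Application.java   // starts with: package hello;
src/main/java/hello/Receiver.java      // starts with: package hello;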
