Issue
I have to create a Spring Batch project with two jobs that can be executed independently or together. Each job reads from the database and writes using a FlatFileItemWriter behind a ClassifierCompositeItemWriter. When I execute the jobs independently (-Dspring.batch.job.names=schoolJob, then -Dspring.batch.job.names=studentJob), the files are generated fine, but when I execute them together (-Dspring.batch.job.names=schoolJob,studentJob), the files of one job contain only the header and the footer. Something is clearly wrong, but I can't find the cause.
Some code
Batch config, job and steps
@Configuration
@EnableBatchProcessing
@SuppressWarnings({"rawtypes", "unchecked"})
public class MyJobConfiguration
{
    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private ConfigurableApplicationContext applicationContext;

    @Bean
    public Step studentStep1() {
        return stepBuilderFactory.get("calculateDistinctValuesAndRegisterStudentWriters")
                .tasklet(new DynamicStudentWritersConfigurationTasklet(jdbcTemplate, applicationContext))
                .build();
    }

    @Bean
    public Step schoolStep1() {
        return stepBuilderFactory.get("calculateDistinctValuesAndRegisterSchoolWriters")
                .tasklet(new DynamicSchoolWritersConfigurationTasklet(jdbcTemplate, applicationContext))
                .build();
    }

    @Bean
    @JobScope
    public Step studentStep2(StudentReader reader,
            @Qualifier("studentClassfierItemWriter") ClassifierCompositeItemWriter<Student> writer) {
        SimpleStepBuilder<Student, Student> studentStep2 = stepBuilderFactory
                .get("readWriteStudents").<Student, Student>chunk(2).reader(reader).writer(writer);
        Map<String, FlatFileItemWriter> beansOfType = applicationContext.getBeansOfType(FlatFileItemWriter.class);
        for (FlatFileItemWriter flatFileItemWriter : beansOfType.values())
        {
            studentStep2.stream(flatFileItemWriter);
        }
        return studentStep2.build();
    }

    @Bean
    @JobScope
    public Step schoolStep2(SchoolReader reader,
            @Qualifier("schoolClassfierItemWriter") ClassifierCompositeItemWriter<School> writer) {
        SimpleStepBuilder<School, School> schoolStep2 = stepBuilderFactory.get("readWriteSchools")
                .<School, School>chunk(2)
                .reader(reader)
                .writer(writer);
        Map<String, FlatFileItemWriter> beansOfType = applicationContext.getBeansOfType(FlatFileItemWriter.class);
        for (FlatFileItemWriter flatFileItemWriter : beansOfType.values())
        {
            schoolStep2.stream(flatFileItemWriter);
        }
        return schoolStep2.build();
    }

    @Bean
    public Job studentJob(Step studentStep1, Step studentStep2) {
        return jobBuilderFactory.get("studentJob").start(studentStep1).next(studentStep2).build();
    }

    @Bean
    public Job schoolJob(Step schoolStep1, Step schoolStep2) {
        return jobBuilderFactory.get("schoolJob").start(schoolStep1).next(schoolStep2).build();
    }
}
Data source configuration
@Configuration
class DatasourceConfig
{
    @Bean
    public DataSource dataSource()
    {
        String dbSchema = "/org/springframework/batch/core/schema-h2.sql";
        String initData = "data.sql";
        return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
                .addScript(dbSchema)
                .addScript(initData)
                .build();
    }
}
Readers
@Component
class SchoolReader extends JdbcCursorItemReader<School>
{
    @Autowired
    private DataSource dataSource;

    @Override
    public void afterPropertiesSet() throws Exception
    {
        super.setName("schoolItemReader");
        super.setDataSource(dataSource);
        super.setSql("select * from school");
        super.setRowMapper(new BeanPropertyRowMapper<>(School.class));
        super.afterPropertiesSet();
    }
}

@Component
class StudentReader extends JdbcCursorItemReader<Student>
{
    @Autowired
    private DataSource dataSource;

    @Override
    public void afterPropertiesSet() throws Exception
    {
        super.setName("studentItemReader");
        super.setDataSource(dataSource);
        super.setSql("select * from student");
        super.setRowMapper(new BeanPropertyRowMapper<>(Student.class));
        super.afterPropertiesSet();
    }
}
Writers
@Configuration
public class SchoolWriter
{
    @Autowired
    private ConfigurableApplicationContext applicationContext;

    @Bean(name = "schoolClassfierItemWriter")
    @StepScope
    public ClassifierCompositeItemWriter<School> itemWriter()
    {
        Map<String, FlatFileItemWriter> beansOfType = applicationContext.getBeansOfType(FlatFileItemWriter.class);
        Classifier<School, FlatFileItemWriter<School>> classifier =
                school -> beansOfType.get("school-group" + school.getGroupId() + "Writer");
        return new ClassifierCompositeItemWriterBuilder().classifier(classifier).build();
    }
}

@Configuration
public class StudentWriter
{
    @Autowired
    private ConfigurableApplicationContext applicationContext;

    @Bean(name = "studentClassfierItemWriter")
    @StepScope
    public ClassifierCompositeItemWriter<Student> itemWriter()
    {
        Map<String, FlatFileItemWriter> beansOfType = applicationContext.getBeansOfType(FlatFileItemWriter.class);
        Classifier<Student, FlatFileItemWriter<Student>> classifier =
                student -> beansOfType.get("student-group" + student.getGroupId() + "Writer");
        return new ClassifierCompositeItemWriterBuilder().classifier(classifier).build();
    }
}
Tasklets
class DynamicSchoolWritersConfigurationTasklet implements Tasklet
{
    private JdbcTemplate jdbcTemplate;
    private ConfigurableApplicationContext applicationContext;

    public DynamicSchoolWritersConfigurationTasklet(JdbcTemplate jdbcTemplate,
            ConfigurableApplicationContext applicationContext)
    {
        this.jdbcTemplate = jdbcTemplate;
        this.applicationContext = applicationContext;
    }

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext)
    {
        ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory();
        String sql = "select distinct(groupId) from school";
        List<Integer> groups = jdbcTemplate.queryForList(sql, Integer.class);
        for (Integer group : groups)
        {
            String name = "school-group" + group + "Writer";
            //@f:off
            MutablePropertyValues propertyValues = new MutablePropertyValues();
            propertyValues.addPropertyValue("name", name);
            propertyValues.addPropertyValue("lineAggregator", new PassThroughLineAggregator<>());
            propertyValues.addPropertyValue("resource", new FileSystemResource("school-" + group + ".txt"));
            propertyValues.addPropertyValue("headerCallback", (FlatFileHeaderCallback) writer -> writer.write("header-school"));
            propertyValues.addPropertyValue("footerCallback", (FlatFileFooterCallback) writer -> writer.write("footer-school"));
            //@f:on
            GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
            beanDefinition.setBeanClassName(FlatFileItemWriter.class.getName());
            beanDefinition.setPropertyValues(propertyValues);
            BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
            registry.registerBeanDefinition(name, beanDefinition);
        }
        return RepeatStatus.FINISHED;
    }
}

class DynamicStudentWritersConfigurationTasklet implements Tasklet
{
    private JdbcTemplate jdbcTemplate;
    private ConfigurableApplicationContext applicationContext;

    public DynamicStudentWritersConfigurationTasklet(JdbcTemplate jdbcTemplate,
            ConfigurableApplicationContext applicationContext)
    {
        this.jdbcTemplate = jdbcTemplate;
        this.applicationContext = applicationContext;
    }

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext)
    {
        ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory();
        String sql = "select distinct(groupId) from student";
        List<Integer> groups = jdbcTemplate.queryForList(sql, Integer.class);
        for (Integer group : groups)
        {
            String name = "student-group" + group + "Writer";
            //@f:off
            MutablePropertyValues propertyValues = new MutablePropertyValues();
            propertyValues.addPropertyValue("name", name);
            propertyValues.addPropertyValue("lineAggregator", new PassThroughLineAggregator<>());
            propertyValues.addPropertyValue("resource", new FileSystemResource("student-" + group + ".txt"));
            propertyValues.addPropertyValue("headerCallback", (FlatFileHeaderCallback) writer -> writer.write("header-student"));
            propertyValues.addPropertyValue("footerCallback", (FlatFileFooterCallback) writer -> writer.write("footer-student"));
            //@f:on
            GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
            beanDefinition.setBeanClassName(FlatFileItemWriter.class.getName());
            beanDefinition.setPropertyValues(propertyValues);
            BeanDefinitionRegistry registry = (BeanDefinitionRegistry) beanFactory;
            registry.registerBeanDefinition(name, beanDefinition);
        }
        return RepeatStatus.FINISHED;
    }
}
DAO
@Getter
@Setter
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class School
{
    private int id;
    private String name;
    private int groupId;
}

@Getter
@Setter
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class Student
{
    private int id;
    private String name;
    private int groupId;
}
This is similar to https://stackoverflow.com/a/67635289/5019386. I think you need to make your dynamic item writers step-scoped as well, something like:
propertyValues.addPropertyValue("scope", "step");
Please note that I did not try that. That said, I would really recommend making your app do one thing and do it well, i.e. isolating the job definitions and packaging/running each job separately.
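For what it's worth, here is a minimal, untested sketch of that idea inside the tasklet. Since scope is an attribute of the bean definition rather than a bean property, it may be safer to set it on the GenericBeanDefinition directly (names reused from the code above):

GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
beanDefinition.setBeanClassName(FlatFileItemWriter.class.getName());
beanDefinition.setPropertyValues(propertyValues);
// register the writer as a step-scoped bean so each step execution gets its own instance
beanDefinition.setScope("step");
registry.registerBeanDefinition(name, beanDefinition);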
Related
I'm new to Spring Boot and have been working on a project that uses Spring Boot with Hibernate, JPA and HikariCP for MySQL DB operations. I currently have two datasources, and I cannot roll back a transaction that I wrote inside a service. I have tried everything I could find, but I can't figure out what I'm missing. Any help would be appreciated; pardon my mistakes, I'm very open to suggestions. Thank you.
@SpringBootApplication
public class AdminModuleApplication {
    public static void main(String[] args)
    {
        SpringApplication.run(AdminModuleApplication.class, args);
    }
}
@Configuration
@PropertySource({ "classpath:acc_payDB_properties.properties" })
@EnableJpaRepositories(
    basePackages = "com.pinnacle.accpay.dao",
    entityManagerFactoryRef = "accpayEntityManager",
    transactionManagerRef = "accpayTransactionManager"
)
public class acc_payDBConfig
{
    @Autowired
    private Environment env;

    @Bean
    public LocalContainerEntityManagerFactoryBean accpayEntityManager()
    {
        String packageList[] = {"com.pinnacle.accpay.model", "com.pinnacle.admin.model"};
        LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
        em.setDataSource(accpayDataSource());
        em.setPackagesToScan(packageList);
        HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
        em.setJpaVendorAdapter(vendorAdapter);
        Properties jpaProperties = new Properties();
        jpaProperties.setProperty("hibernate.hbm2ddl.auto", "none"); // note: never applied, only jpaProperties2 is set below
        Properties jpaProperties2 = new Properties();
        jpaProperties2.setProperty("connection.provider_class", "org.hibernate.hikaricp.internal.HikariCPConnectionProvider");
        em.setJpaProperties(jpaProperties2);
        return em;
    }

    @Bean
    public DataSource accpayDataSource()
    {
        HikariDataSource dataSource = new HikariDataSource();
        dataSource.setDriverClassName(env.getProperty("jdbc.driver-class-name"));
        dataSource.setJdbcUrl(env.getProperty("jdbc.url"));
        dataSource.setUsername(env.getProperty("jdbc.username"));
        dataSource.setPassword(env.getProperty("jdbc.password"));
        dataSource.setCatalog("acc_pay");
        // HikariCP-specific properties. Remove if you move to another connection pooling library.
        dataSource.setConnectionTimeout(20000);
        dataSource.setMaximumPoolSize(20);
        dataSource.setMinimumIdle(10);
        dataSource.setIdleTimeout(20000);
        dataSource.setMaxLifetime(40000);
        dataSource.setAutoCommit(false);
        dataSource.addDataSourceProperty("cachePrepStmts", true);
        dataSource.addDataSourceProperty("prepStmtCacheSize", 25000);
        dataSource.addDataSourceProperty("prepStmtCacheSqlLimit", 20048);
        dataSource.addDataSourceProperty("useServerPrepStmts", true);
        dataSource.addDataSourceProperty("initializationFailFast", true);
        dataSource.setPoolName("ACCPAY DB_HIKARICP_CONNECTION_POOL");
        dataSource.addDataSourceProperty("useLocalTransactionState", false);
        return dataSource; // already a HikariDataSource; wrapping it in another one would open a second pool
    }

    @Bean
    public PlatformTransactionManager accpayTransactionManager()
    {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(accpayEntityManager().getObject());
        return transactionManager;
    }
}
@Service
public class TranscationReceptionService {

    @Autowired
    PO_TXN_MasterBeanRepository po_TXN_MasterBeanRepository;
    @Autowired
    GRN_TXN_MasterBeanRepository grn_TXN_MasterBeanRepository;
    @Autowired
    Invoice_TXN_MasterBeanRepository invoice_TXN_MasterBeanRepository;
    @Autowired
    POEventLogRepository poEventLogRepository;
    @Autowired
    InvoiceEventLogRepository invoiceEventLogRepository;
    @Autowired
    GRNEventlogRepository grnEventlogRepository;
    @Autowired
    PO_GRN_INVBeanRepository po_GRN_INVBeanRepository;
    @Autowired
    LinkEventLogBeanRepository linkEventLogBeanRepository;
    @Autowired
    ScheduledJob scheudledJob;
    @Autowired
    acc_payDBConfig acc_paydbConfig;

    @Value("${in_process_status}")
    private String in_process_status;

    @Transactional(propagation = Propagation.REQUIRED, rollbackFor = Exception.class)
    public void updateMatchStatus(Long poId, Long invoiceId, String poStatus, String invoiceStatus)
    {
        try
        {
            po_TXN_MasterBeanRepository.setPOStatusAfterMatching(poStatus, poId, invoiceId);
            POEventLogBean poEventLogBean = new POEventLogBean();
            poEventLogBean.setPo_id(poId);
            poEventLogBean.setEvent_time(LocalDateTime.now().toString());
            poEventLogBean.setStatus(poStatus);
            poEventLogRepository.save(poEventLogBean);

            po_GRN_INVBeanRepository.setInvoiceNumber(poId, invoiceId);
            Long linkId = po_GRN_INVBeanRepository.getLinkIdfromPONumber(poId);
            LinkEventLogBean linkEventLogBean = new LinkEventLogBean();
            linkEventLogBean.setLink_id(linkId);
            linkEventLogBean.setEvent_time(LocalDateTime.now().toString());
            linkEventLogBean.setStatus("");
            linkEventLogBeanRepository.save(linkEventLogBean);

            invoice_TXN_MasterBeanRepository.setInvoiceStatusAfterMatching(poId, invoiceStatus, invoiceId);
            InvoiceEventLogBean invoiceEventLogBean = new InvoiceEventLogBean();
            invoiceEventLogBean.setInv_id(invoiceId);
            invoiceEventLogBean.setEvent_time(LocalDateTime.now().toString());
            invoiceEventLogBean.setStatus(invoiceStatus);
            invoiceEventLogRepository.save(invoiceEventLogBean);
        }
        catch (Exception e)
        {
            // exception swallowed here; nothing is rethrown
        }
    }
}
@Transactional
@Repository
public interface PO_TXN_MasterBeanRepository extends JpaRepository<PO_TXN_MasterBean, Long>
{
    @Transactional
    @Modifying(clearAutomatically = true)
    @Query("UPDATE PO_TXN_MasterBean p SET p.status = :status, p.inv_id = :invId WHERE p.po_id = :pId")
    public void setPOStatusAfterMatching(@Param("status") String status, @Param("pId") Long poId, @Param("invId") Long invoiceId);
}

@Transactional
@Repository
public interface POEventLogRepository extends JpaRepository<POEventLogBean, Integer>
{
}

@Transactional
@Repository
public interface PO_GRN_INVBeanRepository extends JpaRepository<PO_GRN_INVBean, Long>
{
    @Transactional
    @Modifying(clearAutomatically = true)
    @Query("UPDATE PO_GRN_INVBean p SET p.inv_id = :invId WHERE p.po_id = :pId")
    public void setInvoiceNumber(@Param("pId") Long poId, @Param("invId") Long invoiceId);

    @Query("SELECT p.link_id FROM PO_GRN_INVBean p WHERE p.po_id = :pId")
    public Long getLinkIdfromPONumber(@Param("pId") Long poId);
}

@Transactional
@Repository
public interface LinkEventLogBeanRepository extends JpaRepository<LinkEventLogBean, Long>
{
}

@Repository
@Transactional
public interface Invoice_TXN_MasterBeanRepository extends JpaRepository<Invoice_TXN_MasterBean, Long>
{
    @Modifying(clearAutomatically = true)
    @Transactional
    @Query("UPDATE Invoice_TXN_MasterBean i SET i.po_id = :pid, i.status = :status WHERE i.inv_id = :inid")
    public void setInvoiceStatusAfterMatching(@Param("pid") Long pid, @Param("status") String status, @Param("inid") Long inId);
}

@Repository
public interface InvoiceEventLogRepository extends JpaRepository<InvoiceEventLogBean, Integer>
{
}
Transaction rollback is not working as expected because the exception is handled with a try-catch block inside TranscationReceptionService.updateMatchStatus(). This prevents the Spring framework from ever seeing the exception, so it never marks the transaction for rollback.
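A minimal sketch of the fix, reusing the method from the question: either remove the try-catch entirely, or log and rethrow so the transaction interceptor sees the failure and rolls back:

@Transactional(propagation = Propagation.REQUIRED, rollbackFor = Exception.class)
public void updateMatchStatus(Long poId, Long invoiceId, String poStatus, String invoiceStatus) {
    try {
        // ... the same repository calls as above ...
    } catch (Exception e) {
        // rethrowing lets @Transactional mark the transaction rollback-only
        throw e;
    }
}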
I am new to JdbcTemplate and am trying to use a prepared statement to insert data into the database with auto-commit turned off to achieve high performance, but in the end I am not able to commit the transaction. Please suggest a correct approach or a reference that might solve my problem.
Thanks in advance...
SpringjdbcApplication.java
@SpringBootApplication
public class SpringjdbcApplication
{
    public static void main(String[] args)
    {
        ApplicationContext context = SpringApplication.run(SpringjdbcApplication.class, args);
        SampleService service = context.getBean(SampleService.class);
        List<Batch> batchList = new ArrayList<>();
        batchList.add(new Batch("A", "B"));
        batchList.add(new Batch("B", "B"));
        batchList.add(new Batch("C", "B"));
        batchList.add(new Batch("D", "B"));
        batchList.add(new Batch("E", "B"));
        System.err.println("The number of rows inserted = " + service.singleInsert(batchList));
        System.err.println("The count of batch class is = " + service.getCount());
    }
}
SampleConfiguration.java
@Configuration
public class SampleConfiguration
{
    @Bean
    public DataSource mysqlDataSource()
    {
        HikariConfig config = new HikariConfig();
        config.setDriverClassName("ClassName");
        config.setJdbcUrl("URL");
        config.setUsername("User");
        config.setPassword("Password");
        config.setMinimumIdle(600);
        config.setMaximumPoolSize(30);
        config.setConnectionTimeout(251);
        config.setMaxLifetime(250);
        config.setAutoCommit(false);
        return new HikariDataSource(config);
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource)
    {
        JdbcTemplate jdbcTemplate = new JdbcTemplate();
        jdbcTemplate.setDataSource(dataSource);
        return jdbcTemplate;
    }
}
Batch.java
@Entity
public class Batch implements Serializable
{
    private static final long serialVersionUID = -5687736664713755991L;

    @Id
    @Column(name = "field1")
    private String field1;

    @Column(name = "field2")
    private String field2;

    // ... getters, setters and constructors
}
SampleService.java
@Service
public interface SampleService
{
    public int singleInsert(List<Batch> batchList);
}
SampleServiceImpl.java
@Service
public class SampleServiceImpl implements SampleService
{
    @Autowired
    JdbcTemplate jdbcTemplate;

    @Override
    public int singleInsert(List<Batch> batchList)
    {
        for (Batch i : batchList)
        {
            jdbcTemplate.update("insert into batch values(?,?)", i.getField1(), i.getField2());
        }
        try
        {
            DataSourceUtils.getConnection(jdbcTemplate.getDataSource()).commit();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return 1;
    }
}
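No accepted answer is shown here, but one idiomatic way out is to stop committing the connection by hand and let Spring manage the transaction, for example with a TransactionTemplate around a batchUpdate. A sketch only, assuming a PlatformTransactionManager bean (such as a DataSourceTransactionManager for the HikariCP DataSource above) is configured:

@Service
public class SampleServiceImpl implements SampleService
{
    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private PlatformTransactionManager transactionManager; // assumed bean, e.g. new DataSourceTransactionManager(dataSource)

    @Override
    public int singleInsert(List<Batch> batchList)
    {
        TransactionTemplate tx = new TransactionTemplate(transactionManager);
        return tx.execute(status -> {
            List<Object[]> args = new ArrayList<>();
            for (Batch b : batchList) {
                args.add(new Object[] { b.getField1(), b.getField2() });
            }
            // one round trip per batch instead of per row
            int[] counts = jdbcTemplate.batchUpdate("insert into batch values(?,?)", args);
            // the transaction manager commits when this callback returns normally
            return Arrays.stream(counts).sum();
        });
    }
}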
I need some help.
I want to create a batch application with the following scenario:
Read data from the database and write it to a text file. [Consider this the first step]
When the first step is done, the second step writes a ctl file that contains the writeCount of the first step.
My approach is to create a StepExecutionListener that puts the jobId into the JobExecutionContext.
So in the ItemReader of the second step I can read from the database, but I don't know how to get the jobExecutionId so that I can query MySQL for the right record.
Here is the code
public class WriteDataCtlFile {
    private static final Logger log = LoggerFactory.getLogger(WriteDataCtlFile.class);

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public Step writeCtlFile(ItemReader<JobContext> ctlReader,
                             ItemProcessor<JobContext, CtlFile> ctlProcessor,
                             ItemWriter<CtlFile> ctlWriter){
        return stepBuilderFactory.get("writeCtlFile")
                .<JobContext, CtlFile>chunk(100)
                .reader(ctlReader)
                .processor(ctlProcessor)
                .writer(ctlWriter)
                .build();
    }

    @JobScope
    @Bean
    public ItemReader<JobContext> ctlReader(DataSource dataSource, JobContextMapper jobContextMapper) {
        JdbcCursorItemReader<JobContext> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(dataSource);
        reader.setSql("SELECT short_context FROM BATCH_JOB_EXECUTION_CONTEXT WHERE JOB_EXECUTION_ID = ?");
        // THIS IS WHERE I WANT TO GET jobId
        reader.setPreparedStatementSetter(new JobIdPrepareStatement(jobId));
        reader.setRowMapper(jobContextMapper);
        return reader;
    }

    @Bean
    public ItemProcessor<JobContext, CtlFile> ctlProcessor(){
        return new ItemProcessor<JobContext, CtlFile>() {
            @Override
            public CtlFile process(JobContext jobContext) throws Exception {
                return new CtlFile(jobContext.getShort_context());
            }
        };
    }

    @Bean
    public FlatFileItemWriter<CtlFile> ctlWriter(){
        FlatFileItemWriter<CtlFile> flatFileItemWriter = new FlatFileItemWriter<>();
        flatFileItemWriter.setResource(new FileSystemResource("C:\\Users\\wathanyu.phromma\\data-output.ctl"));
        flatFileItemWriter.setLineAggregator(new LineAggregator<CtlFile>() {
            @Override
            public String aggregate(CtlFile ctlFile) {
                Gson gson = new Gson();
                Map<String, Object> map = gson.fromJson(ctlFile.getWrittenRecordsCount(), Map.class);
                return String.valueOf(map.get("writeCount"));
            }
        });
        return flatFileItemWriter;
    }
}
public class WriteDataTxtFile {
    private static final Logger log = LoggerFactory.getLogger(WriteDataTxtFile.class);

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public Step writeTxtFile(
            ItemReader<Account> reader,
            ItemProcessor<Account, Account> processor,
            ItemWriter<Account> writer){
        return stepBuilderFactory.get("writeTxtFile")
                .<Account, Account>chunk(2)
                .reader(reader)
                .processor(processor)
                .writer(writer)
                .listener(new WriteDataTxtStepListener())
                .build();
    }

    @Bean
    @StepScope
    public JdbcCursorItemReader<Account> reader(DataSource dataSource, AccountMapper accountMapper) {
        log.info("test");
        JdbcCursorItemReader<Account> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(dataSource);
        reader.setSql("SELECT * FROM account");
        reader.setRowMapper(accountMapper);
        return reader;
    }

    @Bean
    public ItemProcessor<Account, Account> processor(){
        return new ItemProcessor<Account, Account>() {
            @Override
            public Account process(Account account) throws Exception {
                return account;
            }
        };
    }

    @Bean
    public FlatFileItemWriter<Account> writer(){
        FlatFileItemWriter<Account> flatFileItemWriter = new FlatFileItemWriter<>();
        flatFileItemWriter.setResource(new FileSystemResource("C:\\Users\\wathanyu.phromma\\data-output.txt"));
        flatFileItemWriter.setLineAggregator(new DelimitedLineAggregator<Account>(){{
            setDelimiter("|");
            setFieldExtractor(new BeanWrapperFieldExtractor<Account>(){{
                setNames(new String[]{ "id", "accountId", "accountName", "createdAt", "updatedAt"});
            }});
        }});
        return flatFileItemWriter;
    }
}

public class WriteDataTxtStepListener implements StepExecutionListener {
    private static final Logger log = LoggerFactory.getLogger(WriteDataTxtStepListener.class);

    @Override
    public void beforeStep(StepExecution stepExecution) {
        Date date = new Date();
        String currentDate = new SimpleDateFormat("yyyy-MM-dd").format(date); // yyyy-MM-dd, not YYYY-mm-dd (week year/minutes)
        stepExecution.getJobExecution().getExecutionContext().put("jobId", stepExecution.getJobExecutionId());
        stepExecution.getJobExecution().getExecutionContext().put("date", currentDate);
        log.info("JobId = " + stepExecution.getJobExecutionId());
        log.info("Before Step Count = " + stepExecution.getWriteCount());
    }

    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        stepExecution.getJobExecution().getExecutionContext().put("writeCount", stepExecution.getWriteCount());
        log.info("After Step Count = " + stepExecution.getWriteCount());
        log.info("ExitStatus = " + stepExecution.getExitStatus().getExitCode());
        return stepExecution.getExitStatus();
    }
}
public class WriteDataToFlatFile {
    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Bean
    public Job readFromApi(Step writeTxtFile, Step writeCtlFile){
        return jobBuilderFactory.get("readFromApiToFlatFile")
                .incrementer(new RunIdIncrementer())
                .start(writeTxtFile)
                .next(writeCtlFile)
                .build();
    }

    @Bean
    public DataSource dataSource(){
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
        dataSource.setUrl("jdbc:mysql://localhost:3306/xxxx?useSSL=false");
        dataSource.setUsername("xxxx");
        dataSource.setPassword("xxxx");
        return dataSource;
    }
}
To get data from the job execution context in the reader of your second step, you can inject the value as a parameter in your bean definition method like this:

@JobScope
@Bean
public ItemReader<JobContext> ctlReader(DataSource dataSource, JobContextMapper jobContextMapper,
        @Value("#{jobExecutionContext['jobId']}") int jobId) {
    // use jobId
}

Hope this helps.
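The same expression works for any key the listener stores, so the writeCount that afterStep() puts into the job execution context could be injected the same way, e.g. @Value("#{jobExecutionContext['writeCount']}") Integer writeCount.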
I'm working on a Spring Batch job. I have a partitioning step (over a list of objects) followed by a slave step with a reader and a writer.
I want to execute the processStep in parallel mode, so I want a dedicated reader/writer instance for each partition.
At the moment the created partitions share the same reader/writer instances, so those operations are done serially: read and write the first partition, then do the same for the next one once the first has completed.
The Spring Boot configuration class:
@Configuration
@Import({ DataSourceConfiguration.class })
public class BatchConfiguration {
    private final static int COMMIT_INTERVAL = 1;

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    @Qualifier(value = "mySqlDataSource")
    private DataSource mySqlDataSource;

    public static int GRID_SIZE = 3;
    public static List<Pojo> myList;

    @Bean
    public Job myJob() throws UnexpectedInputException, ParseException, NonTransientResourceException, Exception {
        return jobBuilderFactory.get("myJob")
                .incrementer(new RunIdIncrementer())
                .start(partitioningStep())
                .build();
    }

    @Bean(name = "partitionner")
    public MyPartitionner partitioner() {
        return new MyPartitionner();
    }

    @Bean
    public SimpleAsyncTaskExecutor taskExecutor() {
        SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor();
        taskExecutor.setConcurrencyLimit(GRID_SIZE);
        return taskExecutor;
    }

    @Bean
    public Step partitioningStep() throws NonTransientResourceException, Exception {
        return stepBuilderFactory.get("partitioningStep")
                .partitioner("processStep", partitioner())
                .step(processStep())
                .taskExecutor(taskExecutor())
                .build();
    }

    @Bean
    public Step processStep() throws UnexpectedInputException, ParseException, NonTransientResourceException, Exception {
        return stepBuilderFactory.get("processStep")
                .<List<Pojo>, List<Pojo>>chunk(COMMIT_INTERVAL)
                .reader(processReader())
                .writer(processWriter())
                .taskExecutor(taskExecutor())
                .build();
    }

    @Bean
    public ProcessReader processReader() throws UnexpectedInputException, ParseException, NonTransientResourceException, Exception {
        return new ProcessReader();
    }

    @Bean
    public ProcessWriter processWriter() {
        return new ProcessWriter();
    }
}
The partitioner class
public class MyPartitionner implements Partitioner {
    @Autowired
    private IService service;

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        // list of 300 objects partitioned as below
        ...
        Map<String, ExecutionContext> partitionData = new HashMap<String, ExecutionContext>();

        ExecutionContext executionContext0 = new ExecutionContext();
        executionContext0.putString("from", Integer.toString(0));
        executionContext0.putString("to", Integer.toString(100));
        partitionData.put("Partition0", executionContext0);

        ExecutionContext executionContext1 = new ExecutionContext();
        executionContext1.putString("from", Integer.toString(101));
        executionContext1.putString("to", Integer.toString(200));
        partitionData.put("Partition1", executionContext1);

        ExecutionContext executionContext2 = new ExecutionContext();
        executionContext2.putString("from", Integer.toString(201));
        executionContext2.putString("to", Integer.toString(299));
        partitionData.put("Partition2", executionContext2);

        return partitionData;
    }
}
The Reader class
public class ProcessReader implements ItemReader<List<Pojo>>, ChunkListener {
    @Autowired
    private IService service;
    private StepExecution stepExecution;
    private static List<String> processedIntervals = new ArrayList<String>();

    @Override
    public List<Pojo> read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
        System.out.println("Instance reference: " + this.toString());
        if (stepExecution.getExecutionContext().containsKey("from") && stepExecution.getExecutionContext().containsKey("to")) {
            Integer from = Integer.valueOf(stepExecution.getExecutionContext().get("from").toString());
            Integer to = Integer.valueOf(stepExecution.getExecutionContext().get("to").toString());
            if (from != null && to != null && !processedIntervals.contains(from + "" + to) && to < BatchConfiguration.myList.size()) {
                processedIntervals.add(String.valueOf(from + "" + to));
                return BatchConfiguration.myList.subList(from, to);
            }
        }
        return null;
    }

    @Override
    public void beforeChunk(ChunkContext context) {
        this.stepExecution = context.getStepContext().getStepExecution();
    }

    @Override
    public void afterChunk(ChunkContext context) { }

    @Override
    public void afterChunkError(ChunkContext context) { }
}
The writer class

public class ProcessWriter implements ItemWriter<List<Pojo>> {
    private final static Logger LOGGER = LoggerFactory.getLogger(ProcessWriter.class);

    @Autowired
    private IService service;

    @Override
    public void write(List<? extends List<Pojo>> pojos) throws Exception {
        if (!pojos.isEmpty()) {
            for (Pojo item : pojos.get(0)) {
                try {
                    service.remove(item.getId());
                } catch (Exception e) {
                    LOGGER.error("Error occurred while removing the item [" + item.getId() + "]", e);
                }
            }
        }
    }
}
Can you please tell me what is wrong with my code?
Resolved by adding @StepScope to my reader and writer bean declarations:
@Configuration
@Import({ DataSourceConfiguration.class })
public class BatchConfiguration {
    ...
    @Bean
    @StepScope
    public ProcessReader processReader() throws UnexpectedInputException, ParseException, NonTransientResourceException, Exception {
        return new ProcessReader();
    }

    @Bean
    @StepScope
    public ProcessWriter processWriter() {
        return new ProcessWriter();
    }
    ...
}
This way, I get a different instance of the chunk components (reader and writer) for each partition.
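As a side note, once the reader is step-scoped, the partition bounds can also be injected straight from the step execution context instead of being fished out in beforeChunk(). A sketch, assuming a ProcessReader constructor taking the bounds (that constructor does not exist in the code above):

@Bean
@StepScope
public ProcessReader processReader(@Value("#{stepExecutionContext['from']}") Integer from,
                                   @Value("#{stepExecutionContext['to']}") Integer to) {
    // each partition gets its own reader instance, parameterized with its own interval
    return new ProcessReader(from, to);
}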
Code:
RabbitMQListener:
@Component
public class ServerThroughRabbitMQ implements ServerThroughAMQPBroker {
    private static final AtomicLong ID_COUNTER = new AtomicLong();
    private final long instanceId = ID_COUNTER.incrementAndGet();

    @Autowired
    public ServerThroughRabbitMQ(UserService userService, LoginService loginService....){
        ....
    }

    @Override
    @RabbitListener(queues = "#{registerQueue.name}")
    public String registerUserAndLogin(String json) {
        .....
    }
}
ServerConfig:
@Configuration
public class ServerConfig {
    @Value("${amqp.broker.exchange-name}")
    private String exchangeName;
    @Value("${amqp.broker.host}")
    private String ampqBrokerHost;
    @Value("${amqp.broker.quidco.queue.postfix}")
    private String quidcoQueuePostfix;
    @Value("${amqp.broker.quidco.queue.durability:true}")
    private boolean quidcoQueueDurability;
    @Value("${amqp.broker.quidco.queue.autodelete:false}")
    private boolean quidcoQueueAutodelete;

    private String registerAndLoginQuequName;
    private String loginAndCheckBonusQuequName;

    @PostConstruct
    public void init() {
        registerAndLoginQuequName = REGISTER_AND_LOGIN_ROUTING_KEY + quidcoQueuePostfix;
    }

    public String getRegisterAndLoginQueueName() {
        return registerAndLoginQuequName;
    }

    public String getLoginAndCheckBonusQueueName() {
        return loginAndCheckBonusQuequName;
    }

    @Bean
    public ConnectionFactory connectionFactory() {
        CachingConnectionFactory connectionFactory = new CachingConnectionFactory(ampqBrokerHost);
        return connectionFactory;
    }

    @Bean
    public AmqpAdmin amqpAdmin() {
        return new RabbitAdmin(connectionFactory());
    }

    @Bean
    public TopicExchange topic() {
        return new TopicExchange(exchangeName);
    }

    @Bean(name = "registerQueue")
    public Queue registerQueue() {
        return new Queue(registerAndLoginQuequName, quidcoQueueDurability, false, quidcoQueueAutodelete);
    }

    @Bean
    public Binding bindingRegisterAndLogin() {
        return BindingBuilder.bind(registerQueue()).to(topic()).with(REGISTER_AND_LOGIN_ROUTING_KEY);
    }
}
TestConfig:
@EnableRabbit
@TestPropertySource("classpath:test.properties")
public class ServerThroughAMQPBrokerRabbitMQIntegrationTestConfig {
    private final ExecutorService executorService = Executors.newCachedThreadPool();
    private LoginService loginServiceMock = mock(LoginService.class);
    private UserService userServiceMock = mock(UserService.class);

    @Bean
    public ExecutorService executor() {
        return executorService;
    }

    @Bean
    public LoginService getLoginServiceMock() {
        return loginServiceMock;
    }

    @Bean
    public UserService getUserService() {
        return userServiceMock;
    }

    @Bean
    @Autowired
    public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory) {
        SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory();
        factory.setConnectionFactory(connectionFactory);
        factory.setMaxConcurrentConsumers(5);
        return factory;
    }

    @Bean
    @Autowired
    public RabbitTemplate getRabbitTemplate(ConnectionFactory connectionFactory) {
        final RabbitTemplate rabbitTemplate = new RabbitTemplate(connectionFactory);
        return rabbitTemplate;
    }

    @Bean
    public ServerThroughRabbitMQ getServerThroughRabbitMQ() {
        return new ServerThroughRabbitMQ(userServiceMock, loginServiceMock, ...);
    }
}
Integration tests:
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = {ServerConfig.class, ServerThroughAMQPBrokerRabbitMQIntegrationTestConfig.class})
@Category({IntegrationTest.class})
@TestPropertySource("classpath:test.properties")
public class ServerThroughAMQPBrokerRabbitMQIntegrationTest {
    final private ObjectMapper jackson = new ObjectMapper();

    @Autowired
    private ExecutorService executor;
    @Autowired
    private ServerThroughRabbitMQ serverThroughRabbitMQ;
    @Autowired
    private RabbitTemplate template;
    @Autowired
    private TopicExchange exchange;
    @Autowired
    UserService userService;
    @Autowired
    LoginService loginService;
    @Autowired
    private AmqpAdmin amqpAdmin;
    @Autowired
    private ServerConfig serverConfig;

    final String username = "username";
    final String email = "email@email.com";
    final Integer tcVersion = 1;
    final int quidcoUserId = 1;
    final String jwt = ProcessLauncherForJwtPhpBuilderUnitWithCxtTest.EXPECTED_JWT;

    @Before
    public void cleanAfterOthersForMyself() {
        cleanTestQueues();
    }

    @After
    public void cleanAfterMyselfForOthers() {
        cleanTestQueues();
    }

    private void cleanTestQueues() {
        amqpAdmin.purgeQueue(serverConfig.getRegisterAndLoginQueueName(), false);
    }

    @Test
    @Category({SlowTest.class, IntegrationTest.class})
    public void testRegistrationAndLogin() throws TimeoutException {
        final Waiter waiter = new Waiter();
        when(userService.register(anyString(), anyString(), anyString())).thenReturn(...);
        when(loginService....()).thenReturn(...);
        executor.submit(() -> {
            final RegistrationRequest request = new RegistrationRequest(username, email, tcVersion);
            final String response;
            try {
                // @todo: converter to convert RegistrationRequest inside next method to json
                response = (String) template.convertSendAndReceive(exchange.getName(), REGISTER_AND_LOGIN_ROUTING_KEY.toString(), jackson.writeValueAsString(request));
                waiter.assertThat(response, not(isEmptyString()));
                final RegistrationResponse registrationResponse = jackson.readValue(response, RegistrationResponse.class);
                waiter.assertThat(...);
                waiter.assertThat(...);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            waiter.resume();
        });
        waiter.await(5, TimeUnit.SECONDS);
    }
}
When I run that test separately, everything works fine, but when I run it with other tests the mocked ServerThroughRabbitMQ isn't being used; some Spring cache forces the old rabbit listener to be used.
I tried to debug it, and I can see that the correct bean is autowired into the test, but for some reason the old listener is still used (the old bean has field instanceId=1, the new mocked bean instanceId=3) and the test fails. I'm not sure how this is even possible; if the old bean still existed, I would expect an autowiring exception.
I tried to use @DirtiesContext BEFORE_CLASS, but faced another problem (see here).
RabbitMQ and integration testing can be hard, since RabbitMQ keeps state between tests:
- messages from previous tests still sitting in queues
- listeners from previous tests still listening on queues
There are several approaches:
- Purge all queues before you start the test (that might be what you mean by cleanTestQueues())
- Delete all queues (or use temporary queues) and recreate them before each test, as in the sketch below
- Kill the listeners or connections of previous tests via the RabbitMQ management REST API
- Delete the vhost and recreate the infrastructure for each test (the most brutal way)
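A sketch of the second approach using the AmqpAdmin already present in the test, with the queue name taken from the ServerConfig above:

private void recreateTestQueues() {
    String queueName = serverConfig.getRegisterAndLoginQueueName();
    // drop the queue together with any stale messages and consumers...
    amqpAdmin.deleteQueue(queueName);
    // ...and declare it again so the test starts from a clean slate
    amqpAdmin.declareQueue(new Queue(queueName, true, false, false));
}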