Spring Batch custom reader to map all values to an array field - java

Hi folks, I am new to Spring Batch and stuck on the problem below; any suggestions would be appreciated.
Problem statement: after reading the data, the Spring Batch reader should map it to an array field.
Input file (tab separated):
val1 val2 val3
va21 va22 va23
va31 va32 va33
POJO:
class SamplePojo {
    private Object[] value;
}
The reader should map the fields as shown below.
Output POJOs:
value[0] = val1
value[1] = val2
value[2] = val3
value[0] = va21
value[1] = va22
value[2] = va23
value[0] = va31
value[1] = va32
value[2] = va33
How can I configure my reader to achieve the above output?

FieldSetMapper is the strategy interface that allows you to customize how tokenized lines are mapped to domain objects. In your case, you can use a FlatFileItemReader with a DelimitedLineTokenizer and a custom FieldSetMapper. Here is a quick example:
import java.util.Arrays;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.FieldSet;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.validation.BindException;
@Configuration
@EnableBatchProcessing
public class SO72571852 {

    @Bean
    public FlatFileItemReader<Pojo> itemReader() throws Exception {
        DefaultLineMapper<Pojo> lineMapper = new DefaultLineMapper<>();
        // the input file is tab separated
        lineMapper.setLineTokenizer(new DelimitedLineTokenizer("\t"));
        lineMapper.setFieldSetMapper(new FieldSetMapper<Pojo>() {
            @Override
            public Pojo mapFieldSet(FieldSet fieldSet) throws BindException {
                Pojo pojo = new Pojo();
                Object[] values = new Object[3];
                values[0] = fieldSet.readString(0);
                values[1] = fieldSet.readString(1);
                values[2] = fieldSet.readString(2);
                pojo.values = values;
                return pojo;
            }
        });
        FlatFileItemReader<Pojo> flatFileItemReader = new FlatFileItemReader<>();
        flatFileItemReader.setResource(new FileSystemResource("input.tsv"));
        flatFileItemReader.setLineMapper(lineMapper);
        flatFileItemReader.afterPropertiesSet();
        return flatFileItemReader;
    }

    @Bean
    public ItemWriter<Pojo> itemWriter() {
        return items -> items.forEach(System.out::println);
    }

    @Bean
    public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) throws Exception {
        return jobs.get("job")
                .start(steps.get("step")
                        .<Pojo, Pojo>chunk(5)
                        .reader(itemReader())
                        .writer(itemWriter())
                        .build())
                .build();
    }

    @Bean
    public DataSource dataSource() {
        return new EmbeddedDatabaseBuilder()
                .setType(EmbeddedDatabaseType.HSQL)
                .addScript("/org/springframework/batch/core/schema-hsqldb.sql")
                .build();
    }

    static class Pojo {
        private Object[] values;

        @Override
        public String toString() {
            return "Pojo{values=" + Arrays.toString(values) + '}';
        }
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(SO72571852.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }
}
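With the three sample lines above saved as input.tsv, running the main method should print something like:
Pojo{values=[val1, val2, val3]}
Pojo{values=[va21, va22, va23]}
Pojo{values=[va31, va32, va33]}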

Related

Trigger Quartz job at a certain time in Java Spring Boot

I have an issue: when I launch the application, the Quartz job in my Java Spring Boot project is triggered immediately, even though it should only trigger at the configured time, in this case 12:00 every day. I tried using the SimpleTriggerFactoryBean class because I had an issue with the plain SimpleTrigger class while configuring the job.
My QuartzConfig class
package com.example.demo.config;
import com.example.demo.jobs.CurrencyImportJob;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.JobDetailFactoryBean;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SimpleTriggerFactoryBean;
import org.springframework.scheduling.quartz.SpringBeanJobFactory;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
@Slf4j
@Configuration
public class QuartzConfig {

    final ApplicationContext applicationContext;

    public QuartzConfig(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    @Bean
    SpringBeanJobFactory createSpringBeanJobFactory() {
        return new SpringBeanJobFactory() {
            @Override
            protected Object createJobInstance(final TriggerFiredBundle bundle) throws Exception {
                final Object job = super.createJobInstance(bundle);
                applicationContext
                        .getAutowireCapableBeanFactory()
                        .autowireBean(job);
                return job;
            }
        };
    }

    @Bean
    public SchedulerFactoryBean createSchedulerFactory(SpringBeanJobFactory springBeanJobFactory, Trigger trigger) {
        SchedulerFactoryBean schedulerFactory = new SchedulerFactoryBean();
        schedulerFactory.setAutoStartup(true);
        schedulerFactory.setWaitForJobsToCompleteOnShutdown(true);
        schedulerFactory.setTriggers(trigger);
        springBeanJobFactory.setApplicationContext(applicationContext);
        schedulerFactory.setJobFactory(springBeanJobFactory);
        return schedulerFactory;
    }

    @Bean
    public SimpleTriggerFactoryBean createSimpleTriggerFactoryBean(JobDetail jobDetail) {
        LocalDateTime now = LocalDateTime.now().withHour(12).withMinute(0);
        Date date = Date.from(now.atZone(ZoneId.systemDefault()).toInstant());
        SimpleTriggerFactoryBean simpleTriggerFactory = new SimpleTriggerFactoryBean();
        simpleTriggerFactory.setStartTime(date);
        simpleTriggerFactory.setJobDetail(jobDetail);
        simpleTriggerFactory.setStartDelay(0);
        simpleTriggerFactory.setRepeatInterval(1000);
        simpleTriggerFactory.setRepeatCount(0);
        return simpleTriggerFactory;
    }

    @Bean
    public JobDetailFactoryBean createJobDetailFactoryBean() {
        JobDetailFactoryBean jobDetailFactory = new JobDetailFactoryBean();
        jobDetailFactory.setJobClass(CurrencyImportJob.class);
        return jobDetailFactory;
    }
}
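A side note on the likely cause: LocalDateTime.now().withHour(12).withMinute(0) yields today's 12:00, so if the application starts after noon the trigger's start time is already in the past and Quartz fires it immediately under its default misfire policy; and with setRepeatCount(0) it will not recur daily either. For a fixed daily time, a CronTriggerFactoryBean is the usual alternative. A minimal sketch (an assumption, not taken from this configuration) that would replace the SimpleTriggerFactoryBean bean above:
// requires org.springframework.scheduling.quartz.CronTriggerFactoryBean
@Bean
public CronTriggerFactoryBean createCronTriggerFactoryBean(JobDetail jobDetail) {
    CronTriggerFactoryBean cronTriggerFactory = new CronTriggerFactoryBean();
    cronTriggerFactory.setJobDetail(jobDetail);
    // fire at 12:00:00 every day
    cronTriggerFactory.setCronExpression("0 0 12 * * ?");
    return cronTriggerFactory;
}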

Spring Batch: How to read the footer of a CSV file and validate it using FlatFileItemReader

I am using Spring Batch and FlatFileItemReader to read a .CSV file. The file has a header (first line), detail lines, and a footer (last line). I want to validate the total number of detail lines against the footer.
This is my example .csv file.
movie.csv
Name|Type|Year
Notting Hill|romantic comedy|1999
Toy Story 3|Animation|2010
Captain America: The First Avenger|Action|2011
3
From the example file: the first line is a header (which I skip), lines 2-4 are detail lines, and the last line is the footer. I want to read the footer and get its value (last line = 3), then get the total number of detail records (3 lines in this case), and finally validate that the footer total (3) equals the detail record count (3). This is my code:
@Bean
@StepScope
public FlatFileItemReader<Movie> movieItemReader(String filePath) {
    FlatFileItemReader<Movie> reader = new FlatFileItemReader<>();
    reader.setLinesToSkip(1); // skip header line
    reader.setResource(new PathResource(filePath));
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer("|");
    DefaultLineMapper<Movie> movieLineMapper = new DefaultLineMapper<>();
    FieldSetMapper<Movie> movieMapper = movieFieldSetMapper();
    movieLineMapper.setLineTokenizer(tokenizer);
    movieLineMapper.setFieldSetMapper(movieMapper);
    movieLineMapper.afterPropertiesSet();
    reader.setLineMapper(movieLineMapper);
    return reader;
}

public FieldSetMapper<Movie> movieFieldSetMapper() {
    BeanWrapperFieldSetMapper<Movie> movieMapper = new BeanWrapperFieldSetMapper<>();
    movieMapper.setTargetType(Movie.class);
    return movieMapper;
}
You can use a chunk oriented step as a validation step before your job's business logic. This step would use a ItemReadListener to save the last item and a StepExecutionListener for the validation. Here is a quick example:
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.ItemReadListener;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.listener.StepExecutionListenerSupport;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ByteArrayResource;
@Configuration
@EnableBatchProcessing
public class MyJob {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Bean
    @StepScope
    public FlatFileItemReader<String> itemReader() {
        FlatFileItemReader<String> reader = new FlatFileItemReader<>();
        reader.setLinesToSkip(1); // skip header line
        reader.setResource(new ByteArrayResource("header\nitem1\nitem2\n2".getBytes()));
        reader.setLineMapper(new PassThroughLineMapper());
        return reader;
    }

    @Bean
    public ItemWriter<String> itemWriter() {
        return items -> {
            for (String item : items) {
                System.out.println("item = " + item);
            }
        };
    }

    @Bean
    public Step step1() {
        MyListener myListener = new MyListener();
        return steps.get("step1")
                .<String, String>chunk(5)
                .reader(itemReader())
                .writer(itemWriter())
                .listener((ItemReadListener<String>) myListener)
                .listener((StepExecutionListener) myListener)
                .build();
    }

    @Bean
    public Step step2() {
        return steps.get("step2")
                .tasklet((contribution, chunkContext) -> {
                    System.out.println("Total count is ok as validated by step1");
                    return RepeatStatus.FINISHED;
                })
                .build();
    }

    @Bean
    public Job job() {
        return jobs.get("job")
                .start(step1())
                .next(step2())
                .build();
    }

    static class MyListener extends StepExecutionListenerSupport implements ItemReadListener<String> {

        private String lastItem;

        @Override
        public void beforeRead() {
        }

        @Override
        public void afterRead(String item) {
            this.lastItem = item;
        }

        @Override
        public void onReadError(Exception ex) {
        }

        @Override
        public ExitStatus afterStep(StepExecution stepExecution) {
            int readCount = stepExecution.getReadCount();
            int totalCountInFooter = Integer.valueOf(this.lastItem); // TODO sanity checks (number format, etc)
            System.out.println("readCount = " + (readCount - 1)); // subtract footer from the read count
            System.out.println("totalCountInFooter = " + totalCountInFooter);
            // TODO do validation on readCount vs totalCountInFooter
            return ExitStatus.COMPLETED; // return appropriate exit status according to validation result
        }
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }
}
This example prints:
item = item1
item = item2
item = 2
readCount = 2
totalCountInFooter = 2
Total count is ok as validated by step1
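If the validation fails, afterStep can return ExitStatus.FAILED and the job can be wired to stop before step2. A minimal sketch of such a conditional flow (an assumption, not part of the original answer):
@Bean
public Job job() {
    return jobs.get("job")
            .flow(step1())
            .on("FAILED").fail()               // stop the job when validation fails
            .from(step1()).on("*").to(step2()) // otherwise run the business logic
            .end()
            .build();
}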
Hope this helps.

Using Spring Batch to convert rows from an input CSV file to an output CSV file with a 1-to-many relation

I've already published this question:
How to use Spring Batch read CSV,process it and write it as a CSV with one row can produce more than one row?
And also reviewed these relevant answers:
Spring Batch - Using an ItemWriter with List of Lists
But still can't figure out how to use Spring Batch in order to:
Read a row from input CSV file.
Process it and produce one or more output rows.
Write the output rows into the output file.
I know the solution should be a writer that accepts a list of items and uses a "delegate" in some way to process the items one by one.
I would appreciate if someone can shed some light on this.
My code:
public class CsvRowsProcessor implements ItemProcessor<RowInput, List<RowOutput>> {

    @Override
    public List<RowOutput> process(final RowInput rowInput) {
        final String id = rowInput.getId();
        final String title = rowInput.getTitle();
        final String description = rowInput.getDescription();
        final RowOutput transformedRowInput = new RowOutput(id, title, description);
        List<RowOutput> rows = new LinkedList<>();
        rows.add(transformedRowInput);
        return rows;
    }
}
@Bean
ItemWriter<RowOutput> csvRowsWriter() {
    FlatFileItemWriter<RowOutput> csvFileWriter = new FlatFileItemWriter<>();
    csvFileWriter.setResource(new FileSystemResource("C:\\Users\\orenl\\IdeaProjects\\Spring-Batch-CSV-Example\\src\\main\\resources\\outputFile.csv"));
    LineAggregator<RowOutput> lineAggregator = createLineAggregator();
    csvFileWriter.setLineAggregator(lineAggregator);
    csvFileWriter.setHeaderCallback(new FlatFileHeaderCallback() {
        public void writeHeader(Writer writer) throws IOException {
            writer.write("Id,Title,Description");
        }
    });
    return csvFileWriter;
}

private LineAggregator<RowOutput> createLineAggregator() {
    DelimitedLineAggregator<RowOutput> lineAggregator = new DelimitedLineAggregator<>();
    lineAggregator.setDelimiter(",");
    FieldExtractor<RowOutput> fieldExtractor = createFieldExtractor();
    lineAggregator.setFieldExtractor(fieldExtractor);
    return lineAggregator;
}

private FieldExtractor<RowOutput> createFieldExtractor() {
    BeanWrapperFieldExtractor<RowOutput> extractor = new BeanWrapperFieldExtractor<>();
    extractor.setNames(new String[] { "Id", "Title", "Description" });
    return extractor;
}

@Bean
public Step csvFileToFileStep() {
    return stepBuilderFactory.get("csvFileToFileStep")
            .<RowInput, RowOutput>chunk(1)
            .reader(csvRowsReader())
            .processor(csvRowsProcessor())
            .writer(csvRowsWriter())
            .build();
}

@Bean
Job csvFileToCsvJob(JobCompletionNotificationListener listener) {
    return jobBuilderFactory.get("csvFileToCsvJob")
            .incrementer(new RunIdIncrementer())
            .listener(listener)
            .flow(csvFileToFileStep())
            .end()
            .build();
}
Here is an example:
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableBatchProcessing
public class MyJob {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Bean
    public ItemReader<Integer> itemReader() {
        return new ListItemReader<>(Arrays.asList(1, 3, 5, 7, 9));
    }

    @Bean
    public ItemProcessor<Integer, List<Integer>> itemProcessor() {
        return item -> {
            List<Integer> result = new ArrayList<>();
            result.add(item);
            result.add(item + 1);
            return result;
        };
    }

    @Bean
    public ItemWriter<List<Integer>> itemWriter() {
        return items -> {
            for (List<Integer> item : items) {
                for (Integer integer : item) {
                    System.out.println("integer = " + integer);
                }
            }
        };
    }

    @Bean
    public Step step() {
        return steps.get("step")
                .<Integer, List<Integer>>chunk(2)
                .reader(itemReader())
                .processor(itemProcessor())
                .writer(itemWriter())
                .build();
    }

    @Bean
    public Job job() {
        return jobs.get("job")
                .start(step())
                .build();
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }
}
This sample reads some numbers, returns each number together with its successor, and prints them to standard output. It shows how processing one item can return multiple items.
It prints:
integer = 1
integer = 2
integer = 3
integer = 4
integer = 5
integer = 6
integer = 7
integer = 8
integer = 9
integer = 10
You can adapt the sample to read/write from/to files.
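For file output specifically, one option (a sketch under assumptions, not part of the original answer) is a writer that unwraps the lists and delegates to a FlatFileItemWriter such as the csvRowsWriter from the question; the bean name and wiring here are hypothetical:
@Bean
public ItemWriter<List<RowOutput>> listUnwrappingWriter(FlatFileItemWriter<RowOutput> delegate) {
    return lists -> {
        // flatten the chunk of List<RowOutput> into a single list for the delegate
        List<RowOutput> flattened = new ArrayList<>();
        for (List<RowOutput> list : lists) {
            flattened.addAll(list);
        }
        delegate.write(flattened);
    };
}
Because the delegate is hidden behind a lambda, it also needs to be registered on the step with .stream(delegate) so its open/close lifecycle is managed.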
Hope this helps.

Spring Batch Java config error using ClassifierCompositeItemWriter

I'm using Spring Batch with Java configuration (new to this) and I'm running into an error when trying to use a ClassifierCompositeItemWriter to generate separate files based on a classifier.
The error I'm getting is: org.springframework.batch.item.WriterNotOpenException: Writer must be open before it can be written to
My configuration looks like follows:
package com.infonova.btcompute.batch.geneva.job;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusFinishedJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusInprogressJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTransferStatus;
import com.infonova.btcompute.batch.geneva.camel.GenevaJobLauncher;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.geneva.dto.GenveaDetailsTransactionDto;
import com.infonova.btcompute.batch.geneva.properties.GenevaDetailsExportJobProperties;
import com.infonova.btcompute.batch.geneva.rowmapper.GenevaDetailsTransactionsRowMapper;
import com.infonova.btcompute.batch.geneva.steps.*;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import com.infonova.product.batch.camel.CamelEnabledJob;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.support.ClassifierCompositeItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.classify.BackToBackPatternClassifier;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public abstract class AbstractGenevaDetailsExportJob extends CamelEnabledJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGenevaDetailsExportJob.class);

    @Autowired
    protected JobBuilderFactory jobBuilders;

    @Autowired
    protected StepBuilderFactory stepBuilders;

    @Autowired
    protected DataSource datasource;

    @Autowired
    private BillrunTransferStatusMapper billrunTransferStatusMapper;

    @Autowired
    protected JdbcTemplate jdbcTemplate;

    public abstract GenevaDetailsExportJobProperties jobProperties();

    @Bean
    public RouteBuilder routeBuilder(final GenevaDetailsExportJobProperties jobProperties, final Job job) {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from(jobProperties.getConsumer())
                        .transacted("PROPAGATION_REQUIRED")
                        .routeId(jobProperties.getInputRouteName())
                        .process(genevaJobLauncher(job));
                //.to("ftp://app@127.0.0.1?password=secret");
            }
        };
    }

    @Bean
    public Processor genevaJobLauncher(Job job) {
        return new GenevaJobLauncher(job);
    }

    @Bean
    @StepScope
    public GenevaDetailsReader reader() {
        GenevaDetailsReader reader = new GenevaDetailsReader(jobProperties().getMandatorKey(),
                jobProperties().getInvoiceType(), jobProperties().getSqlResourcePath());
        reader.setSql("");
        reader.setDataSource(datasource);
        reader.setRowMapper(new GenevaDetailsTransactionsRowMapper());
        reader.setFetchSize(jobProperties().getFetchSize());
        return reader;
    }

    @Bean
    @StepScope
    public GenevaDetailsItemProcessor processor() {
        return new GenevaDetailsItemProcessor();
    }

    @Bean
    @StepScope
    public ClassifierCompositeItemWriter writer() {
        List<String> serviceCodes = new ArrayList<>(); //billrunTransferStatusMapper.getServiceCodes(jobProperties().getMandatorKey());
        Long billingTaskId = billrunTransferStatusMapper.getCurrentTaskId(jobProperties().getMandatorKey());
        String countryKey = billrunTransferStatusMapper.getCountryKey(billingTaskId);
        serviceCodes.add("BTCC");
        serviceCodes.add("CCMS");
        BackToBackPatternClassifier classifier = new BackToBackPatternClassifier();
        classifier.setRouterDelegate(new GenveaDetailsRouterClassifier());
        HashMap<String, Object> map = new HashMap<>();
        for (String serviceCode : serviceCodes) {
            map.put(serviceCode, genevaDetailsWriter(serviceCode, countryKey));
        }
        classifier.setMatcherMap(map);
        ClassifierCompositeItemWriter<GenveaDetailsTransactionDto> writer = new ClassifierCompositeItemWriter<>();
        writer.setClassifier(classifier);
        return writer;
    }

    @Bean
    @StepScope
    public GenevaDetailsFlatFileItemWriter genevaDetailsWriter(String serviceCode, String countryKey) {
        GenevaDetailsFlatFileItemWriter writer = new GenevaDetailsFlatFileItemWriter(jobProperties().getDelimiter());
        FileNameGeneration fileNameGeneration = new FileNameGeneration();
        try {
            FileSystemResource fileSystemResource = new FileSystemResource(new File(jobProperties().getExportDir(),
                    fileNameGeneration.generateFileName(jdbcTemplate, serviceCode, countryKey)));
            writer.setResource(fileSystemResource);
        } catch (SQLException e) {
            LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
        }
        return writer;
    }

    @Bean
    public Job job() {
        return jobBuilders.get(jobProperties().getJobName())
                .start(setBillRunTransferStatusDetailInprogressStep())
                .next(processGenevaDetailsStep())
                .next(setBillRunTransferStatusProcessedStep())
                .build();
    }

    @Bean
    public Step setBillRunTransferStatusDetailInprogressStep() {
        return stepBuilders.get("setBillRunTransferStatusDetailInprogressStep")
                .tasklet(setBillRunTransferStatusDetailInprogress())
                .build();
    }

    @Bean
    public Tasklet setBillRunTransferStatusDetailInprogress() {
        return new BillRunTranStatusInprogressJobAssignment(BillRunTransferStatus.SUMMARY.toString(), BillRunTransferStatus.DETAILS_INPROGRESS.toString(),
                jobProperties().getMandatorKey(), jobProperties().getInvoiceTypeNum(), jobProperties().getReportTypeNum());
    }

    @Bean
    public Step setBillRunTransferStatusProcessedStep() {
        return stepBuilders.get("setBillRunTransferStatusProcessedStep")
                .tasklet(setBillRunTransferStatusProcessed())
                .build();
    }

    @Bean
    public Tasklet setBillRunTransferStatusProcessed() {
        return new BillRunTranStatusFinishedJobAssignment(BillRunTransferStatus.PROCESSED.toString());
    }

    @Bean
    public Step processGenevaDetailsStep() {
        return stepBuilders.get("processGenevaDetailsStep")
                .<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
    }
}
and my writer looks like:
package com.infonova.btcompute.batch.geneva.steps;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.*;
import org.springframework.batch.item.file.FlatFileHeaderCallback;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
@Component
public class GenevaDetailsFlatFileItemWriter extends FlatFileItemWriter<GenevaDetailsResultsDto> {

    private static final Logger LOGGER = LoggerFactory.getLogger(GenevaDetailsFlatFileItemWriter.class);

    @Autowired
    protected JdbcTemplate jdbcTemplate;

    @Autowired
    private BillrunTransferStatusMapper billrunTransferStatusMapper;

    private String delimiter;

    public GenevaDetailsFlatFileItemWriter(String delimiter) {
        this.delimiter = delimiter;
        this.setLineAggregator(getLineAggregator());
        this.setHeaderCallback(getHeaderCallback());
    }

    private DelimitedLineAggregator<GenevaDetailsResultsDto> getLineAggregator() {
        DelimitedLineAggregator<GenevaDetailsResultsDto> delLineAgg = new DelimitedLineAggregator<>();
        delLineAgg.setDelimiter(delimiter);
        BeanWrapperFieldExtractor<GenevaDetailsResultsDto> fieldExtractor = new BeanWrapperFieldExtractor<>();
        fieldExtractor.setNames(getNames());
        delLineAgg.setFieldExtractor(fieldExtractor);
        return delLineAgg;
    }

    private String[] getHeaderNames() {
        return new String[] {"Record ID", "Service Identifier", "Billing Account Reference", "Cost Description", "Event Cost",
                "Event Date and Time", "Currency Code", "Charge Category", "Order Identifier", "Net Usage", "UOM",
                "Quantity", "Service Start Date", "Service End Date"};
    }

    private String[] getNames() {
        return new String[] {"RECORD_ID", "SERVICE_CODE", "BILLING_ACCOUNT_REFERENCE", "COST_DESCRIPTION", "EVENT_COST",
                "EVENT_DATE_AND_TIME", "CURRENCY_CODE", "CHARGE_CATEGORY", "ORDER_IDENTIFIER", "NET_USAGE", "UOM",
                "QUANTITY", "SERVICE_START_DATE", "SERVICE_END_DATE"};
    }

    private FlatFileHeaderCallback getHeaderCallback() {
        return new FlatFileHeaderCallback() {
            @Override
            public void writeHeader(Writer writer) throws IOException {
                writer.write(String.join(delimiter, getHeaderNames()));
            }
        };
    }

    // @BeforeStep
    // public void beforeStep(StepExecution stepExecution) {
    //     billingTaskId = (Long) stepExecution.getJobExecution().getExecutionContext().get("billingTaskId");
    //     FileNameGeneration fileNameGeneration = new FileNameGeneration();
    //
    //     try {
    //         FileSystemResource fileSystemResource = new FileSystemResource(new File(exportDir, fileNameGeneration.generateFileName(jdbcTemplate,
    //                 serviceCode, billrunTransferStatusMapper.getCountryKey(billingTaskId))));
    //         setResource(fileSystemResource);
    //     } catch (SQLException e) {
    //         LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
    //     }
    // }
}
I have searched the web and cannot find a solution to this issue.
What @Hansjoerg Wingeier wrote about ClassifierCompositeItemWriter is correct, but the right way to resolve the problem is to register the delegate writer(s) as stream(s) using AbstractTaskletStepBuilder.stream() to let Spring Batch manage the execution context lifecycle.
ClassifierCompositeItemWriter does not implement the ItemStream interface, hence the open method of your FlatFileItemWriter is never called.
The easiest thing to do is to call the open method when you create your classifier map:
for (String serviceCode : serviceCodes) {
    FlatFileItemWriter writer = genevaDetailsWriter(serviceCode, countryKey);
    writer.open(new ExecutionContext());
    map.put(serviceCode, writer);
}
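A minimal sketch of the stream registration approach mentioned above (the service codes and country key are placeholders here, since the real values come from billrunTransferStatusMapper):
@Bean
public Step processGenevaDetailsStep() {
    return stepBuilders.get("processGenevaDetailsStep")
            .<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
            .reader(reader())
            .processor(processor())
            .writer(writer())
            // register each delegate writer so Spring Batch opens and closes it
            .stream(genevaDetailsWriter("BTCC", "someCountryKey"))
            .stream(genevaDetailsWriter("CCMS", "someCountryKey"))
            .build();
}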

How to send and receive a file with Java RabbitMQ?

How do I send a file with Java RabbitMQ, especially using a message converter?
I'm using the Spring Framework and can send a String or an ArrayList, but I can't send a File. When I use convertAndSend and convertAndReceive to send a File, I get:
org.springframework.amqp.AmqpIOException: java.io.FileNotFoundException
I don't know how to use a message converter. The code is from here, with some classes changed:
HelloWorldHandler.java
package org.springframework.amqp.helloworld.async;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import org.springframework.amqp.core.Message;
public class HelloWorldHandler {

    public void handleMessage(File message) throws IOException {
        try (BufferedReader br = new BufferedReader(new FileReader(message))) {
            System.out.println(br.readLine());
        }
    }
}
ProducerConfiguration.java
package org.springframework.amqp.helloworld.async;
import java.io.File;
import java.util.concurrent.atomic.AtomicInteger;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor;
@Configuration
public class ProducerConfiguration {

    protected final String helloWorldQueueName = "hello.world.queue";

    @Bean
    public RabbitTemplate rabbitTemplate() {
        RabbitTemplate template = new RabbitTemplate(connectionFactory());
        template.setRoutingKey(this.helloWorldQueueName);
        return template;
    }

    @Bean
    public ConnectionFactory connectionFactory() {
        CachingConnectionFactory connectionFactory = new CachingConnectionFactory("x.x.x.x");
        connectionFactory.setUsername("username");
        connectionFactory.setPassword("password");
        return connectionFactory;
    }

    @Bean
    public ScheduledProducer scheduledProducer() {
        return new ScheduledProducer();
    }

    @Bean
    public BeanPostProcessor postProcessor() {
        return new ScheduledAnnotationBeanPostProcessor();
    }

    static class ScheduledProducer {

        @Autowired
        private volatile RabbitTemplate rabbitTemplate;

        private final AtomicInteger counter = new AtomicInteger();

        @Scheduled(fixedRate = 3000)
        public void sendMessage() {
            rabbitTemplate.convertAndSend(new File("test.txt"));
        }
    }
}
You can convert the file content into a byte array and send the byte[] as shown below (using java.nio.file.Files to read the file):
File file = new File("test.txt");
byte[] fileData = Files.readAllBytes(file.toPath()); // get content from file as byte[]
String fileType = Files.probeContentType(file.toPath()); // get file type from file
Message message = MessageBuilder.withBody(fileData).setHeader("ContentType", fileType).build();
rabbitTemplate.send("exchange name", "routing key", message);
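On the receiving side, a minimal sketch (an assumption, not part of the original answer) that reads the raw bytes back and writes them to a file; the queue name and output path are placeholders:
// requires java.nio.file.Files and java.nio.file.Paths
Message received = rabbitTemplate.receive("hello.world.queue");
if (received != null) {
    Files.write(Paths.get("received.txt"), received.getBody());
}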
