Spring Batch Java config error using ClassifierCompositeItemWriter

I'm using Spring Batch with Java configuration (new to this) and I'm running into an error when trying to use a ClassifierCompositeItemWriter to generate separate files based on a classifier.
The error I'm getting is org.springframework.batch.item.WriterNotOpenException: Writer must be open before it can be written to.
My configuration looks as follows:
package com.infonova.btcompute.batch.geneva.job;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusFinishedJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTranStatusInprogressJobAssignment;
import com.infonova.btcompute.batch.billruntransfer.BillRunTransferStatus;
import com.infonova.btcompute.batch.geneva.camel.GenevaJobLauncher;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.geneva.dto.GenveaDetailsTransactionDto;
import com.infonova.btcompute.batch.geneva.properties.GenevaDetailsExportJobProperties;
import com.infonova.btcompute.batch.geneva.rowmapper.GenevaDetailsTransactionsRowMapper;
import com.infonova.btcompute.batch.geneva.steps.*;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import com.infonova.product.batch.camel.CamelEnabledJob;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.support.ClassifierCompositeItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.classify.BackToBackPatternClassifier;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import javax.sql.DataSource;
import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public abstract class AbstractGenevaDetailsExportJob extends CamelEnabledJob {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGenevaDetailsExportJob.class);
@Autowired
protected JobBuilderFactory jobBuilders;
@Autowired
protected StepBuilderFactory stepBuilders;
@Autowired
protected DataSource datasource;
@Autowired
private BillrunTransferStatusMapper billrunTransferStatusMapper;
@Autowired
protected JdbcTemplate jdbcTemplate;
public abstract GenevaDetailsExportJobProperties jobProperties();
@Bean
public RouteBuilder routeBuilder(final GenevaDetailsExportJobProperties jobProperties, final Job job) {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from(jobProperties.getConsumer())
.transacted("PROPAGATION_REQUIRED")
.routeId(jobProperties.getInputRouteName())
.process(genevaJobLauncher(job));
//.to("ftp://app#127.0.0.1?password=secret");
}
};
}
@Bean
public Processor genevaJobLauncher(Job job) {
return new GenevaJobLauncher(job);
}
@Bean
@StepScope
public GenevaDetailsReader reader() {
GenevaDetailsReader reader = new GenevaDetailsReader(jobProperties().getMandatorKey(),
jobProperties().getInvoiceType(), jobProperties().getSqlResourcePath());
reader.setSql("");
reader.setDataSource(datasource);
reader.setRowMapper(new GenevaDetailsTransactionsRowMapper());
reader.setFetchSize(jobProperties().getFetchSize());
return reader;
}
@Bean
@StepScope
public GenevaDetailsItemProcessor processor() {
return new GenevaDetailsItemProcessor();
}
@Bean
@StepScope
public ClassifierCompositeItemWriter writer() {
List<String> serviceCodes = new ArrayList<>();//billrunTransferStatusMapper.getServiceCodes(jobProperties().getMandatorKey());
Long billingTaskId = billrunTransferStatusMapper.getCurrentTaskId(jobProperties().getMandatorKey());
String countryKey = billrunTransferStatusMapper.getCountryKey(billingTaskId);
serviceCodes.add("BTCC");
serviceCodes.add("CCMS");
BackToBackPatternClassifier classifier = new BackToBackPatternClassifier();
classifier.setRouterDelegate(new GenveaDetailsRouterClassifier());
HashMap<String, Object> map = new HashMap<>();
for (String serviceCode : serviceCodes) {
map.put(serviceCode, genevaDetailsWriter(serviceCode, countryKey));
}
classifier.setMatcherMap(map);
ClassifierCompositeItemWriter<GenveaDetailsTransactionDto> writer = new ClassifierCompositeItemWriter<>();
writer.setClassifier(classifier);
return writer;
}
@Bean
@StepScope
public GenevaDetailsFlatFileItemWriter genevaDetailsWriter(String serviceCode, String countryKey) {
GenevaDetailsFlatFileItemWriter writer = new GenevaDetailsFlatFileItemWriter(jobProperties().getDelimiter());
FileNameGeneration fileNameGeneration = new FileNameGeneration();
try {
FileSystemResource fileSystemResource = new FileSystemResource(new File(jobProperties().getExportDir(), fileNameGeneration.generateFileName(jdbcTemplate,
serviceCode, countryKey)));
writer.setResource(fileSystemResource);
} catch (SQLException e) {
LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
}
return writer;
}
@Bean
public Job job() {
return jobBuilders.get(jobProperties().getJobName())
.start(setBillRunTransferStatusDetailInprogressStep())
.next(processGenevaDetailsStep())
.next(setBillRunTransferStatusProcessedStep())
.build();
}
@Bean
public Step setBillRunTransferStatusDetailInprogressStep() {
return stepBuilders.get("setBillRunTransferStatusDetailInprogressStep")
.tasklet(setBillRunTransferStatusDetailInprogress())
.build();
}
@Bean
public Tasklet setBillRunTransferStatusDetailInprogress() {
return new BillRunTranStatusInprogressJobAssignment(BillRunTransferStatus.SUMMARY.toString(), BillRunTransferStatus.DETAILS_INPROGRESS.toString(),
jobProperties().getMandatorKey(), jobProperties().getInvoiceTypeNum(), jobProperties().getReportTypeNum());
}
@Bean
public Step setBillRunTransferStatusProcessedStep() {
return stepBuilders.get("setBillRunTransferStatusProcessedStep")
.tasklet(setBillRunTransferStatusProcessed())
.build();
}
@Bean
public Tasklet setBillRunTransferStatusProcessed() {
return new BillRunTranStatusFinishedJobAssignment(BillRunTransferStatus.PROCESSED.toString());
}
@Bean
public Step processGenevaDetailsStep() {
return stepBuilders.get("processGenevaDetailsStep")
.<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
.reader(reader())
.processor(processor())
.writer(writer())
.build();
}
}
and my writer looks like:
package com.infonova.btcompute.batch.geneva.steps;
import com.infonova.btcompute.batch.geneva.dto.GenevaDetailsResultsDto;
import com.infonova.btcompute.batch.repository.BillrunTransferStatusMapper;
import com.infonova.btcompute.batch.utils.FileNameGeneration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.item.*;
import org.springframework.batch.item.file.FlatFileHeaderCallback;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor;
import org.springframework.batch.item.file.transform.DelimitedLineAggregator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
@Component
public class GenevaDetailsFlatFileItemWriter extends FlatFileItemWriter<GenevaDetailsResultsDto> {
private static final Logger LOGGER = LoggerFactory.getLogger(GenevaDetailsFlatFileItemWriter.class);
@Autowired
protected JdbcTemplate jdbcTemplate;
@Autowired
private BillrunTransferStatusMapper billrunTransferStatusMapper;
private String delimiter;
public GenevaDetailsFlatFileItemWriter(String delimiter) {
this.delimiter = delimiter;
this.setLineAggregator(getLineAggregator());
this.setHeaderCallback(getHeaderCallback());
}
private DelimitedLineAggregator<GenevaDetailsResultsDto> getLineAggregator() {
DelimitedLineAggregator<GenevaDetailsResultsDto> delLineAgg = new DelimitedLineAggregator<>();
delLineAgg.setDelimiter(delimiter);
BeanWrapperFieldExtractor<GenevaDetailsResultsDto> fieldExtractor = new BeanWrapperFieldExtractor<>();
fieldExtractor.setNames(getNames());
delLineAgg.setFieldExtractor(fieldExtractor);
return delLineAgg;
}
private String[] getHeaderNames() {
return new String[] {"Record ID", "Service Identifier", "Billing Account Reference", "Cost Description", "Event Cost",
"Event Date and Time", "Currency Code", "Charge Category", "Order Identifier", "Net Usage", "UOM",
"Quantity", "Service Start Date", "Service End Date"};
}
private String[] getNames() {
return new String[] {"RECORD_ID", "SERVICE_CODE", "BILLING_ACCOUNT_REFERENCE", "COST_DESCRIPTION", "EVENT_COST",
"EVENT_DATE_AND_TIME", "CURRENCY_CODE", "CHARGE_CATEGORY", "ORDER_IDENTIFIER", "NET_USAGE", "UOM",
"QUANTITY", "SERVICE_START_DATE", "SERVICE_END_DATE"};
}
private FlatFileHeaderCallback getHeaderCallback()
{
return new FlatFileHeaderCallback() {
@Override
public void writeHeader(Writer writer) throws IOException {
writer.write(String.join(delimiter, getHeaderNames()));
}
};
}
// @BeforeStep
// public void beforeStep(StepExecution stepExecution) {
// billingTaskId = (Long) stepExecution.getJobExecution().getExecutionContext().get("billingTaskId");
// FileNameGeneration fileNameGeneration = new FileNameGeneration();
//
// try {
// FileSystemResource fileSystemResource = new FileSystemResource(new File(exportDir, fileNameGeneration.generateFileName(jdbcTemplate,
// serviceCode, billrunTransferStatusMapper.getCountryKey(billingTaskId))));
// setResource(fileSystemResource);
// } catch (SQLException e) {
// LOGGER.error("Error creating FileSystemResource : " + e.getMessage());
// }
// }
}
I have searched the web and cannot find a solution to this issue.

What @Hansjoerg Wingeier wrote about ClassifierCompositeItemWriter is correct, but the right way to resolve the problem is to register the delegate writer(s) as stream(s) using AbstractTaskletStepBuilder.stream(), so that Spring Batch manages the execution-context lifecycle (open/update/close) for you.
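Applied to the configuration in the question, the fix looks roughly like the sketch below (it assumes writer() puts these same two delegate instances into the classifier map; how countryKey is resolved is elided here):
@Bean
public Step processGenevaDetailsStep() {
    GenevaDetailsFlatFileItemWriter btccWriter = genevaDetailsWriter("BTCC", countryKey);
    GenevaDetailsFlatFileItemWriter ccmsWriter = genevaDetailsWriter("CCMS", countryKey);
    return stepBuilders.get("processGenevaDetailsStep")
            .<GenveaDetailsTransactionDto, GenevaDetailsResultsDto>chunk(jobProperties().getChunkSize())
            .reader(reader())
            .processor(processor())
            .writer(writer())
            // registering the delegates as streams makes Spring Batch call their
            // open()/update()/close() around the step, which is exactly what
            // WriterNotOpenException is complaining about
            .stream(btccWriter)
            .stream(ccmsWriter)
            .build();
}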

ClassifierCompositeItemWriter does not implement the ItemStream interface, hence the open method of your FlatFileItemWriter is never called.
The easiest thing to do is to call the open method when you create your classifier map:
for (String serviceCode : serviceCodes) {
FlatFileItemWriter writer = genevaDetailsWriter(serviceCode, countryKey);
writer.open(new ExecutionContext());
map.put(serviceCode, writer);
}
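Note that writers opened by hand like this are never closed by Spring Batch, so buffered output and footers may never be flushed. If you go this route, close them yourself when the step finishes, e.g. (a sketch; writers is an assumed field collecting the instances created in the loop above):
@AfterStep
public ExitStatus afterStep(StepExecution stepExecution) {
    // close each manually opened delegate so buffers are flushed and file handles released
    for (FlatFileItemWriter<?> writer : writers) {
        writer.close();
    }
    return stepExecution.getExitStatus();
}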

Related

Spring batch custom reader to map all the values to array field

Hi folks, I am new to Spring Batch and stuck on the problem below; any suggestions are appreciated.
Problem statement: the Spring Batch reader, after reading the data, should map each row to an array field.
Input file (tab separated):
val1 val2 val3
va21 va22 va23
va31 va32 va33
Pojo
class SamplePojo{
private Object[] value;
}
The reader should map the fields as shown below.
Output POJOs:
value[0] = val1
value[1] = val2
value[2] = val3
value[0] = va21
value[1] = va22
value[2] = va23
value[0] = va31
value[1] = va32
value[2] = va33
How can I configure my reader to achieve the above output?
FieldSetMapper is the strategy interface that allows you to customize how tokenized lines are mapped to domain objects. In your case, you can use a FlatFileItemReader with a DelimitedLineTokenizer and a custom FieldSetMapper. Here is a quick example:
import java.util.Arrays;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.FieldSet;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.validation.BindException;
@Configuration
@EnableBatchProcessing
public class SO72571852 {
@Bean
public FlatFileItemReader<Pojo> itemReader() throws Exception {
DefaultLineMapper<Pojo> lineMapper = new DefaultLineMapper<>();
lineMapper.setLineTokenizer(new DelimitedLineTokenizer("\t")); // input is tab separated
lineMapper.setFieldSetMapper(new FieldSetMapper<Pojo>() {
@Override
public Pojo mapFieldSet(FieldSet fieldSet) throws BindException {
Pojo pojo = new Pojo();
Object[] values = new Object[3];
values[0] = fieldSet.readString(0);
values[1] = fieldSet.readString(1);
values[2] = fieldSet.readString(2);
pojo.values = values;
return pojo;
}
});
FlatFileItemReader<Pojo> flatFileItemReader = new FlatFileItemReader<>();
flatFileItemReader.setResource(new FileSystemResource("input.tsv"));
flatFileItemReader.setLineMapper(lineMapper);
flatFileItemReader.afterPropertiesSet();
return flatFileItemReader;
}
@Bean
public ItemWriter<Pojo> itemWriter() {
return items -> {items.forEach(System.out::println);};
}
@Bean
public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) throws Exception {
return jobs.get("job")
.start(steps.get("step")
.<Pojo, Pojo>chunk(5)
.reader(itemReader())
.writer(itemWriter())
.build())
.build();
}
@Bean
public DataSource dataSource() {
return new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.HSQL)
.addScript("/org/springframework/batch/core/schema-hsqldb.sql")
.build();
}
static class Pojo {
private Object[] values;
@Override
public String toString() {
return "Pojo{values=" + Arrays.toString(values) + '}';
}
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(SO72571852.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
}

Read .xlsx File from Drool Engine Dynamically without restarting spring project

I am trying to read my Excel file using Drools. I can successfully read the file from S3, but the problem comes when I make changes to that file and replace it with a new one: my rule engine still reads the old file's values. Can anyone tell me what can be done to resolve this issue?
Dto
package com.example.demo;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.stereotype.Component;
@Getter
@Setter
@ToString
@Component
@RefreshScope
public class DataDTO {
String firstName;
String lastName;
}
Drool Config
package com.example.demo.Config;
import lombok.extern.slf4j.Slf4j;
import org.drools.decisiontable.DecisionTableProviderImpl;
import org.kie.api.KieServices;
import org.kie.api.builder.*;
import org.kie.api.io.Resource;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.internal.io.ResourceFactory;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.io.IOException;
@Configuration
@Slf4j
@RefreshScope
public class DroolConfig {
private KieServices kieServices = KieServices.Factory.get();
public KieFileSystem getKieFileSystem() throws IOException {
KieFileSystem kieFileSystem = kieServices.newKieFileSystem();
Resource resource;
log.info("File read form local resource folder");
resource = ResourceFactory
.newUrlResource("awsurl/123.xlsx");
kieFileSystem.write(resource);
DecisionTableProviderImpl decisionTableProvider
= new DecisionTableProviderImpl();
String leadBox = decisionTableProvider.loadFromResource(resource, null);
log.info("Rules Loaded from LeadBoxFile:{}", leadBox);
return kieFileSystem;
}
@Bean
@RefreshScope
public KieContainer getKieContainer() throws IOException {
getKieRepository();
KieBuilder abc = kieServices.newKieBuilder(getKieFileSystem());
abc.buildAll();
KieModule kieModule = abc.getKieModule();
KieContainer kContainer = kieServices.newKieContainer(kieModule.getReleaseId());
return kContainer;
}
private void getKieRepository() {
final KieRepository kieRepository = kieServices.getRepository();
kieRepository.addKieModule(new KieModule() {
public ReleaseId getReleaseId() {
return kieRepository.getDefaultReleaseId();
}
});
}
@Bean
public KieSession getKieSession() throws IOException {
return getKieContainer().newKieSession();
}
}
Controller
package com.example.demo;
import com.example.demo.Config.DroolConfig;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
@RestController
@RequestMapping("/v2")
public class LoadData {
@Autowired
private KieSession kieSession;
@Autowired
private final KieContainer kieContainer;
@Autowired
DroolConfig droolConfig;
public LoadData(KieSession kieSession, KieContainer kieContainer) {
this.kieSession = kieSession;
this.kieContainer = kieContainer;
}
@PostMapping("/printData")
public String printData(@RequestBody DataDTO dataDTO) throws IOException {
KieSession kieSession = kieContainer.newKieSession();
kieSession.insert(dataDTO);
kieSession.fireAllRules();
kieSession.dispose();
return dataDTO.getLastName();
}
@GetMapping("/loadDroolFile")
public String getDroolData() throws IOException {
droolConfig.getKieFileSystem();
kieContainer.getKieSessionConfiguration();
return "Done";
}
}
Whenever I hit loadDroolFile I can see the refreshed data being read, but it never takes effect: I still get results from the Drools engine based on the old values.
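One thing to check (an assumption based on the code shown, not a verified fix): getKieContainer() builds the container once at startup, and loadDroolFile only re-reads the file into a fresh KieFileSystem without rebuilding anything, so the old KieBase keeps serving rules. A sketch of a refresh endpoint that rebuilds the module and points the existing container at the new release:
@GetMapping("/loadDroolFile")
public String getDroolData() throws IOException {
    // re-read the .xlsx into a fresh KieFileSystem
    KieFileSystem kieFileSystem = droolConfig.getKieFileSystem();
    // rebuild the module from the fresh resource
    KieBuilder kieBuilder = KieServices.Factory.get().newKieBuilder(kieFileSystem);
    kieBuilder.buildAll();
    // swap the running container over to the newly built release
    kieContainer.updateToVersion(kieBuilder.getKieModule().getReleaseId());
    return "Done";
}
Any KieSession created from the container after this point should see the new decision table; sessions created earlier (including the singleton KieSession bean) would still hold the old rules.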

How to write a proper unit test for Elasticsearch in Java

Overview:
I'm totally new to Elasticsearch testing and I want to add proper unit tests. The project setup is as follows:
Java 8
Elasticsearch 6.2.4
The project uses the low-level REST client for fetching data from ES
More info about the ES configuration:
import static java.net.InetAddress.getByName;
import static java.util.Arrays.stream;
import java.net.UnknownHostException;
import java.util.Map;
import java.util.Objects;
import javax.inject.Inject;
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import au.com.api.util.RestClientUtil;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Configuration
public class ElasticConfiguration implements InitializingBean{
#Value(value = "${elasticsearch.hosts}")
private String[] hosts;
#Value(value = "${elasticsearch.httpPort}")
private int httpPort;
#Value(value = "${elasticsearch.tcpPort}")
private int tcpPort;
#Value(value = "${elasticsearch.clusterName}")
private String clusterName;
@Inject
private RestClientUtil client;
@Bean
public RestHighLevelClient restHighClient() {
return new RestHighLevelClient(RestClient.builder(httpHosts()));
}
@Bean
@Deprecated
public RestClient restClient() {
return RestClient.builder(httpHosts()).build();
}
/**
* @return TransportClient
* @throws UnknownHostException
*/
@SuppressWarnings("resource")
@Bean
public TransportClient transportClient() throws UnknownHostException{
Settings settings = Settings.builder()
.put("cluster.name", clusterName).build();
return new PreBuiltTransportClient(settings).addTransportAddresses(transportAddresses());
}
@Override
public void afterPropertiesSet() throws Exception {
log.debug("loading search templates...");
try {
for (Map.Entry<String, String> entry : Constants.SEARCH_TEMPLATE_MAP.entrySet()) {
client.putInlineSearchTemplateToElasticsearch(entry.getKey(), entry.getValue());
}
} catch (Exception e) {
log.error("Exception has occurred in putting search templates into ES.", e);
}
}
private HttpHost[] httpHosts() {
return stream(hosts).map(h -> new HttpHost(h, httpPort, "http")).toArray(HttpHost[]::new);
}
private TransportAddress[] transportAddresses() throws UnknownHostException {
TransportAddress[] transportAddresses = stream(hosts).map(h -> {
try {
return new TransportAddress(getByName(h), tcpPort);
} catch (UnknownHostException e) {
log.error("Exception has occurred in creating ES TransportAddress. host: '{}', tcpPort: '{}'", h, tcpPort, e);
}
return null;
}).filter(Objects::nonNull).toArray(TransportAddress[]::new);
if (transportAddresses.length == 0) {
throw new UnknownHostException();
}
return transportAddresses;
}
}
Issue:
I don't know how to mock ES, or how to test ES without running a standalone ES instance on my machine. Please use the following class as an example and let me know how I could write a test case (a unit test, not an integration test) for the getSearchResponse method:
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.mustache.SearchTemplateRequestBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.Aggregation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Repository;
@Slf4j
@Repository
@NoArgsConstructor
public abstract class NewBaseElasticsearchRepository {
@Autowired
protected NewIndexLocator newIndexLocator;
#Value(value = "${elasticsearch.client.timeout}")
private Long timeout;
@Autowired
protected TransportClient transportClient;
@Autowired
protected ThresholdService thresholdService;
@Autowired
protected MessageSource messageSource;
/**
* @param script the name of the script to be executed
* @param templateParams a map of the parameters to be sent to the script
* @param indexName the index to target (an empty indexName will search all indexes)
*
* @return a Search Response object containing details of the request results from Elasticsearch
*
* @throws NoNodeAvailableException thrown when the transport client cannot connect to any ES Nodes (or Coordinators)
* @throws Exception thrown for all other request errors such as parsing and non-connectivity related issues
*/
protected SearchResponse getSearchResponse(String script, Map<String, Object> templateParams, String... indexName) {
log.debug("transport client >> index name --> {}", Arrays.toString(indexName));
SearchResponse searchResponse;
try {
searchResponse = new SearchTemplateRequestBuilder(transportClient)
.setScript(script)
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
.setRequest(new SearchRequest(indexName))
.execute()
.actionGet(timeout)
.getResponse();
} catch (NoNodeAvailableException e) {
log.error(ELASTIC_SEARCH_EXCEPTION_NOT_FOUND, e.getMessage());
throw new ElasticSearchException(ELASTIC_SEARCH_EXCEPTION_NOT_FOUND);
} catch (Exception e) {
log.error(ELASTIC_SEARCH_EXCEPTION, e.getMessage());
throw new ElasticSearchException(ELASTIC_SEARCH_EXCEPTION);
}
log.debug("searchResponse ==> {}", searchResponse);
return searchResponse;
}
So, I would be grateful if you could have a look at the example class and share your solutions for how I could mock the TransportClient and get a proper SearchResponse back.
Note:
I tried to use ESTestCase from org.elasticsearch.test:framework:6.2.4 but faced a jar-hell issue and couldn't resolve it. I also couldn't find any proper docs on it, or on Java ES unit testing in general.
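Not an authoritative answer, but one common way to make this unit-testable without a running cluster: the fluent SearchTemplateRequestBuilder is constructed inside getSearchResponse, which makes it hard to mock directly, so extract its creation into an overridable factory method and stub the chain with Mockito deep stubs. A sketch (the templateRequestBuilder() factory method is an assumed refactoring, not part of the original class):
// in NewBaseElasticsearchRepository, getSearchResponse would call this instead of "new"
protected SearchTemplateRequestBuilder templateRequestBuilder() {
    return new SearchTemplateRequestBuilder(transportClient);
}

// unit test sketch (static imports from org.mockito.Mockito assumed)
final SearchTemplateRequestBuilder builder =
        mock(SearchTemplateRequestBuilder.class, RETURNS_DEEP_STUBS);
SearchResponse expected = mock(SearchResponse.class);
when(builder.setScript(anyString())
        .setScriptType(any(ScriptType.class))
        .setScriptParams(anyMap())
        .setRequest(any(SearchRequest.class))
        .execute()
        .actionGet(anyLong())
        .getResponse()).thenReturn(expected);

NewBaseElasticsearchRepository repository = new NewBaseElasticsearchRepository() {
    @Override
    protected SearchTemplateRequestBuilder templateRequestBuilder() {
        return builder; // bypass the real transport client entirely
    }
};
// the @Value-injected timeout field must be set by hand, e.g. with spring-test:
ReflectionTestUtils.setField(repository, "timeout", 100L);
// repository.getSearchResponse("script", params, "index") should now return 'expected'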

Can't Access Atmosphere MetaBroadcaster in Spring Boot Controller

I have a simple chat application set up using Spring Boot (1.3.2) and Atmosphere (2.4.2).
Here is my WebSocketConfigurer:
package com.chat.shared.websocket;
import java.util.Collections;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
import org.apache.catalina.Context;
import org.apache.tomcat.websocket.server.WsSci;
import org.atmosphere.cpr.AtmosphereFramework;
import org.atmosphere.cpr.AtmosphereServlet;
import org.atmosphere.cpr.MetaBroadcaster;
import org.springframework.boot.context.embedded.ServletContextInitializer;
import org.springframework.boot.context.embedded.tomcat.TomcatContextCustomizer;
import org.springframework.boot.context.embedded.tomcat.TomcatEmbeddedServletContainerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.chat.privatechat.ChatChannel;
@Configuration
public class WebSocketConfigurer implements ServletContextInitializer {
@Bean
public TomcatEmbeddedServletContainerFactory tomcatContainerFactory() {
TomcatEmbeddedServletContainerFactory factory = new TomcatEmbeddedServletContainerFactory();
factory.setTomcatContextCustomizers(Collections.singletonList(tomcatContextCustomizer()));
return factory;
}
@Bean
public TomcatContextCustomizer tomcatContextCustomizer() {
return new TomcatContextCustomizer() {
@Override
public void customize(Context context) {
context.addServletContainerInitializer(new WsSci(), null);
}
};
}
@Bean
public AtmosphereServlet atmosphereServlet() {
return new AtmosphereServlet();
}
@Bean
public AtmosphereFramework atmosphereFramework() {
return atmosphereServlet().framework();
}
@Bean
public MetaBroadcaster metaBroadcaster() {
AtmosphereFramework framework = atmosphereFramework();
return framework.metaBroadcaster();
}
@Override
public void onStartup(ServletContext servletContext) throws ServletException {
configureAtmosphere(atmosphereServlet(), servletContext);
}
private void configureAtmosphere(AtmosphereServlet servlet, ServletContext servletContext) {
ServletRegistration.Dynamic reg = servletContext.addServlet("atmosphereServlet", servlet);
reg.setInitParameter("org.atmosphere.cpr.packages", ChatChannel.class.getPackage().getName());
reg.setInitParameter("org.atmosphere.cpr.broadcasterClass", "org.atmosphere.plugin.hazelcast.HazelcastBroadcaster");
reg.setInitParameter("org.atmosphere.cpr.broadcaster.maxProcessingThreads", "10");
reg.setInitParameter("org.atmosphere.cpr.broadcaster.maxAsyncWriteThreads", "10");
reg.setInitParameter("org.atmosphere.interceptor.HeartbeatInterceptor.clientHeartbeatFrequencyInSeconds", "10");
servletContext.addListener(new org.atmosphere.cpr.SessionSupport());
reg.addMapping("/chat/*");
reg.setLoadOnStartup(0);
reg.setAsyncSupported(true);
}
}
And here is how I'm currently leveraging it in the ChatChannel:
package com.chat.privatechat;
import com.chat.privatechat.DTOs.ChatMessageDTO;
import com.chat.shared.localmessagebus.LocalMessage;
import com.chat.shared.localmessagebus.LocalMessageBus;
import org.atmosphere.config.service.Disconnect;
import org.atmosphere.config.service.Get;
import org.atmosphere.config.service.ManagedService;
import org.atmosphere.config.service.PathParam;
import org.atmosphere.config.service.Ready;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceFactory;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.MetaBroadcaster;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.inject.Inject;
@ManagedService(path = "/chat/{channel: [a-zA-Z][a-zA-Z_0-9]*}")
public class ChatChannel {
@PathParam("channel")
private String channelUuid;
@Inject
private BroadcasterFactory factory;
@Inject
private AtmosphereResourceFactory resourceFactory;
@Inject
private MetaBroadcaster metaBroadcaster;
@Get
public void init(AtmosphereResource resource) {
resource.getResponse().setCharacterEncoding(StandardCharsets.UTF_8.name());
}
@Ready
public void onReady(final AtmosphereResource resource) {
String userId = resource.getRequest().getHeader("userId");
System.out.println("User " + userId + " has connected.");
}
@Disconnect
public void onDisconnect(AtmosphereResourceEvent event) {
String userId = event.getResource().getRequest().getHeader("userId");
System.out.println("User " + userId + " has disconnected");
}
@org.atmosphere.config.service.Message(encoders = MessageEncoderDecoder.class, decoders = MessageEncoderDecoder.class)
public ChatMessageDTO onMessage(ChatMessageDTO chatMessage) throws IOException {
LocalMessageBus.manager().send(new LocalMessage<ChatMessageDTO>(chatMessage));
return chatMessage;
}
}
This setup works great: users in a conversation are connected to a "channel" and messages are sent/received immediately. (LocalMessageBus is a simple message bus that will eventually be replaced by a proper message broker.)
Although I don't have a use case for this yet, I went to set up a MetaBroadcaster in my ChatController to see if I could broadcast messages from there. Unfortunately, I am not able to properly inject/reference the MetaBroadcaster, as it is always null. Here are the important bits of the ChatController:
package com.chat.privatechat;
import java.util.List;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import com.chat.privatechat.DTOs.ChatMessageDTO;
import com.chat.privatechat.DTOs.ChatSessionInitializationDTO;
import com.chat.privatechat.DTOs.EstablishedChatSessionDTO;
import com.chat.shared.http.JSONResponseHelper;
import com.chat.user.UserService;
import com.chat.user.exceptions.IsSameUserException;
import com.chat.user.exceptions.UserNotFoundException;
import com.chat.user.strategies.UserRetrievalByIdStrategy;
@Controller
public class ChatController {
@Autowired
private ChatService chatService;
@Autowired
private BeanFactory beanFactory;
@Autowired
private UserService userService;
@Inject
private MetaBroadcaster metaBroadcaster;
@RequestMapping(value="/api/chat/session", method=RequestMethod.PUT, produces="application/json", consumes="application/json")
public ResponseEntity<String> establishChatSession(@RequestBody ChatSessionInitializationDTO initialChatSession) throws IsSameUserException, BeansException, UserNotFoundException {
...
}
@RequestMapping(value="/api/chat/session/{channelUuid}", method=RequestMethod.GET, produces="application/json")
public ResponseEntity<String> getExistingChatSessionMessages(@PathVariable("channelUuid") String channelUuid) {
...
}
}
Neither injecting/autowiring MetaBroadcaster nor pulling the metaBroadcaster bean from the BeanFactory works. I've searched and searched without finding a good solution. It seems like the bean is not accessible in this Spring controller context, and I'm running out of ideas.
Thank you for any input!
NOTE: These are the Atmosphere deps I have:
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-runtime</artifactId>
<version>2.4.2</version>
</dependency>
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-spring</artifactId>
<version>2.4.2</version>
</dependency>
<dependency>
<groupId>org.webjars</groupId>
<artifactId>atmosphere-javascript</artifactId>
<version>2.2.3</version>
</dependency>
<dependency>
<groupId>org.atmosphere</groupId>
<artifactId>atmosphere-hazelcast</artifactId>
<version>2.4.2</version>
</dependency>
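One thing worth ruling out (an assumption from the config shown, not a confirmed diagnosis): the metaBroadcaster() bean calls framework.metaBroadcaster() at bean-creation time, but the AtmosphereFramework only wires up its MetaBroadcaster when the AtmosphereServlet is initialized, so the bean may simply capture null. A sketch of a lazier lookup in the controller that defers the call until the framework is up (the /api/chat/broadcast endpoint is purely illustrative):
@Autowired
private AtmosphereFramework atmosphereFramework;

// resolve lazily, after the AtmosphereServlet has been initialized
private MetaBroadcaster metaBroadcaster() {
    return atmosphereFramework.metaBroadcaster();
}

@RequestMapping(value = "/api/chat/broadcast", method = RequestMethod.POST)
public ResponseEntity<String> broadcast(@RequestBody String message) {
    // broadcast to every resource connected under /chat/
    metaBroadcaster().broadcastTo("/chat/*", message);
    return new ResponseEntity<>("sent", HttpStatus.OK);
}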

How to send and receive a file with Java RabbitMQ?

How do I send a file with Java and RabbitMQ, especially using a message converter?
I'm using the Spring Framework and can send a String or an ArrayList, but I can't send a File. When I use convertAndSend and convertAndReceive with a File I get:
org.springframework.amqp.AmqpIOException: java.io.FileNotFoundException
I don't know how to use a message converter. The code is from here, with some classes changed:
HelloWorldHandler.java
package org.springframework.amqp.helloworld.async;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import org.springframework.amqp.core.Message;
public class HelloWorldHandler {
public void handleMessage(File message) throws IOException {
BufferedReader br = new BufferedReader(new FileReader(message));
System.out.println(br.readLine());
}
}
ProducerConfiguration.java
package org.springframework.amqp.helloworld.async;
import java.io.File;
import java.util.concurrent.atomic.AtomicInteger;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor;
@Configuration
public class ProducerConfiguration {
protected final String helloWorldQueueName = "hello.world.queue";
@Bean
public RabbitTemplate rabbitTemplate() {
RabbitTemplate template = new RabbitTemplate(connectionFactory());
template.setRoutingKey(this.helloWorldQueueName);
return template;
}
@Bean
public ConnectionFactory connectionFactory() {
CachingConnectionFactory connectionFactory = new CachingConnectionFactory("x.x.x.x");
connectionFactory.setUsername("username");
connectionFactory.setPassword("password");
return connectionFactory;
}
@Bean
public ScheduledProducer scheduledProducer() {
return new ScheduledProducer();
}
@Bean
public BeanPostProcessor postProcessor() {
return new ScheduledAnnotationBeanPostProcessor();
}
static class ScheduledProducer {
@Autowired
private volatile RabbitTemplate rabbitTemplate;
private final AtomicInteger counter = new AtomicInteger();
@Scheduled(fixedRate = 3000)
public void sendMessage() {
rabbitTemplate.convertAndSend(new File("test.txt"));
}
}
}
You can convert the file content into a byte array and send the byte[] as shown below.
byte[] fileData = ...; // read the file content into a byte[]
String fileType = ...; // determine the file type
Message message = MessageBuilder.withBody(fileData).setHeader("ContentType", fileType).build();
rabbitTemplate.send("exchange name", "routing key", message);
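On the receiving side you can read the bytes back out of the message and write them to disk, e.g. (a sketch; the queue name matches the producer configuration above and the target file name is illustrative):
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// receive one message and persist its body
Message received = rabbitTemplate.receive("hello.world.queue");
if (received != null) {
    String fileType = (String) received.getMessageProperties().getHeaders().get("ContentType");
    Path target = Paths.get("received." + fileType); // e.g. "received.txt"
    Files.write(target, received.getBody());
}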
