Spring batch DB to JSON files [duplicate] - java

This question already has answers here:
Spring Batch: Writing data to multiple files with dynamic File Name
(2 answers)
Closed 2 years ago.
This question might seem to be a duplicate of this but it is not
My requirement is to read data from the DB using JdbcPagingItemReader, process individual records for some additional processing, and in the writer create an individual JSON file for each processed item, with the file name id_of_record_json_file.txt.
For example, if the reader reads 100 records then 100 JSON files have to be created.
What is the best way to do this? Can we use Spring Batch for this?
Update 1:
As per @Mahmoud's answer, a tasklet can be used. I have also tried implementing a custom ItemWriter in a chunk-oriented step; this also seems to work:
@Override
public void write(final List<? extends Person> persons) throws Exception {
for (Person person : persons) {
// one JSON file per record, named by the record id
objectMapper.writeValue(new File("D:/cp/" + person.getId() + ".json"), person);
}
}

Using a chunk-oriented step won't work, because there would be a single item writer whose resource is set upfront and fixed for the entire step. Using a composite item writer might work, but you would need to know how many distinct writers to create and configure upfront.
The most straightforward option I see is to use a tasklet, something like:
import java.util.Collections;
import java.util.HashMap;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.builder.JdbcPagingItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
@Configuration
@EnableBatchProcessing
public class MyJob {
@Bean
public JdbcPagingItemReader<Person> itemReader() {
return new JdbcPagingItemReaderBuilder<Person>()
.name("personItemReader")
.dataSource(dataSource())
.beanRowMapper(Person.class)
.selectClause("select *")
.fromClause("from person")
.sortKeys(new HashMap<String, Order>() {{ put("id", Order.DESCENDING);}})
.build();
}
@Bean
public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) {
return jobs.get("job")
.start(steps.get("step")
.tasklet(new MyTasklet(itemReader()))
.build())
.build();
}
private static class MyTasklet implements Tasklet {
private boolean readerInitialized;
private JdbcPagingItemReader<Person> itemReader;
public MyTasklet(JdbcPagingItemReader<Person> itemReader) {
this.itemReader = itemReader;
}
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext();
if (!readerInitialized) {
itemReader.open(executionContext);
readerInitialized = true;
}
Person person = itemReader.read();
if (person == null) {
itemReader.close();
return RepeatStatus.FINISHED;
}
// process the item
process(person);
// write the item in its own file (dynamically generated at runtime)
write(person, executionContext);
// save current state in execution context: in case of restart after failure, the job would resume where it left off.
itemReader.update(executionContext);
return RepeatStatus.CONTINUABLE;
}
private void process(Person person) {
// do something with the item
}
private void write(Person person, ExecutionContext executionContext) throws Exception {
FlatFileItemWriter<Person> itemWriter = new FlatFileItemWriterBuilder<Person>()
.resource(new FileSystemResource("person" + person.getId() + ".csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
itemWriter.open(executionContext);
itemWriter.write(Collections.singletonList(person));
itemWriter.close();
}
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
@Bean
public DataSource dataSource() {
EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.H2)
.addScript("/org/springframework/batch/core/schema-drop-h2.sql")
.addScript("/org/springframework/batch/core/schema-h2.sql")
.build();
JdbcTemplate jdbcTemplate = new JdbcTemplate(embeddedDatabase);
jdbcTemplate.execute("create table person (id int primary key, name varchar(20));");
for (int i = 1; i <= 10; i++) {
jdbcTemplate.execute(String.format("insert into person values (%s, 'foo%s');", i, i));
}
return embeddedDatabase;
}
static class Person {
private int id;
private String name;
public Person() {
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String toString() {
return "Person{id=" + id + ", name='" + name + '\'' + '}';
}
}
}
This example reads 10 persons from a DB table and generates 10 CSV files (person1.csv, person2.csv, etc.).
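Since the requirement is JSON rather than CSV, the write method in the tasklet could use Jackson directly instead of a FlatFileItemWriter. A minimal sketch, assuming Jackson's ObjectMapper is on the classpath and using the file-name pattern from the question:
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;

private final ObjectMapper objectMapper = new ObjectMapper();

// Replaces the write(person, executionContext) method above: no item writer is needed,
// Jackson serializes each record straight into its own file named after the record id.
private void write(Person person) throws Exception {
    objectMapper.writeValue(new File(person.getId() + "_json_file.txt"), person);
}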

Related

Axon Aggregate Not Found When Trying to Update An Existing Record

I am using Axon and MongoDB to implement a microservice using the CQRS and Event Sourcing patterns. When I first start the application, everything works fine: I can create an order and update that order. After I restart the application I can still create an order and update that order, but when I try to update previous orders, Axon throws "Command 'com.example.demo.command.command.UpdateOrderCommand' resulted in org.axonframework.modelling.command.AggregateNotFoundException (The aggregate was not found in the event store)". My theory is that when I create an order, an instance of OrderAggregate is created and used for updates, but after a restart that instance is gone, and updating does not create an OrderAggregate instance. Here is my OrderAggregate.java file:
package com.example.demo.command.aggreagate;
import com.example.demo.command.command.CreateOrderCommand;
import com.example.demo.command.command.UpdateOrderCommand;
import com.example.demo.common.event.OrderCreatedEvent;
import com.example.demo.common.event.OrderUpdatedEvent;
import com.example.demo.query.entity.Address;
import com.example.demo.query.entity.Product;
import lombok.extern.slf4j.Slf4j;
import org.axonframework.commandhandling.CommandHandler;
import org.axonframework.eventsourcing.EventSourcingHandler;
import org.axonframework.modelling.command.AggregateIdentifier;
import org.axonframework.modelling.command.AggregateLifecycle;
import org.axonframework.spring.stereotype.Aggregate;
@Aggregate
@Slf4j
public class OrderAggregate {
@AggregateIdentifier
private String _id;
private String customerId;
private int quantity;
private double price;
private String status;
private Product product;
private Address address;
private String createdAt;
private String updatedAt;
public OrderAggregate() {
}
@CommandHandler
public OrderAggregate(CreateOrderCommand command) {
log.info("CreateOrderCommand received.");
AggregateLifecycle.apply(new OrderCreatedEvent(command.getId(), command.getCustomerId(),
command.getQuantity(), command.getPrice(), "ORDERED",
command.getProduct(), command.getAddress(), command.getCreatedAt(), command.getUpdatedAt()));
}
@EventSourcingHandler
public void on(OrderCreatedEvent event) {
log.info("An OrderCreatedEvent occurred.");
this._id = event.getId();
this.customerId = event.getCustomerId();
this.quantity = event.getQuantity();
this.price = event.getPrice();
this.status = "CREATED";
this.product = event.getProduct();
this.address = event.getAddress();
this.createdAt = event.getCreatedAt();
this.updatedAt = event.getUpdatedAt();
}
@CommandHandler
public void on(UpdateOrderCommand command) {
log.info("UpdateOrderCommand received.");
AggregateLifecycle.apply(new OrderUpdatedEvent(command.getId(), command.getQuantity(), command.getPrice(),
"UPDATED", command.getProduct(), command.getAddress(), updatedAt));
}
@EventSourcingHandler
public void on(OrderUpdatedEvent event) {
log.info("An OrderUpdatedEvent occurred.");
this.quantity = event.getQuantity();
this.price = event.getPrice();
this.status = event.getStatus();
this.product = event.getProduct();
this.address = event.getAddress();
this.updatedAt = event.getUpdatedAt();
}
}
I tried making on(UpdateOrderCommand) an OrderAggregate constructor. It kind of works, but it clears the updatedAt field when writing to the database. Any help is appreciated!
Edit:
These are my AxonConfig.java and MongoConfig.java files:
AxonConfig.java
import com.mongodb.client.MongoClient;
import org.axonframework.eventsourcing.eventstore.EmbeddedEventStore;
import org.axonframework.eventsourcing.eventstore.EventStorageEngine;
import org.axonframework.eventsourcing.eventstore.EventStore;
import org.axonframework.extensions.mongo.DefaultMongoTemplate;
import org.axonframework.extensions.mongo.MongoTemplate;
import org.axonframework.extensions.mongo.eventsourcing.eventstore.MongoEventStorageEngine;
import org.axonframework.spring.config.AxonConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
public class AxonConfig {
@Bean
public EmbeddedEventStore eventStore(EventStorageEngine storageEngine, AxonConfiguration configuration) {
return EmbeddedEventStore.builder()
.storageEngine(storageEngine)
.messageMonitor(configuration.messageMonitor(EventStore.class, "eventStore"))
.build();
}
// The `MongoEventStorageEngine` stores each event in a separate MongoDB document
@Bean
public EventStorageEngine storageEngine(MongoClient client) {
return MongoEventStorageEngine.builder().mongoTemplate(DefaultMongoTemplate.builder().mongoDatabase(client).build()).build();
}
}
MongoConfig.java:
package com.example.demo.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
@Configuration
public class MongoConfig {
@Bean
ValidatingMongoEventListener validatingMongoEventListener(LocalValidatorFactoryBean validator) {
return new ValidatingMongoEventListener(validator);
}
}
Edit #2:
I updated AxonConfig.java file and this solved my problem:
package com.example.demo.config;
import com.mongodb.client.MongoClient;
import org.axonframework.config.Configurer;
import org.axonframework.config.DefaultConfigurer;
import org.axonframework.eventhandling.tokenstore.TokenStore;
import org.axonframework.eventsourcing.EventCountSnapshotTriggerDefinition;
import org.axonframework.eventsourcing.SnapshotTriggerDefinition;
import org.axonframework.eventsourcing.eventstore.EventStorageEngine;
import org.axonframework.extensions.mongo.DefaultMongoTemplate;
import org.axonframework.extensions.mongo.eventsourcing.eventstore.MongoEventStorageEngine;
import org.axonframework.extensions.mongo.eventsourcing.tokenstore.MongoTokenStore;
import org.axonframework.serialization.Serializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class AxonConfig {
@Autowired
public void configuration(Configurer configurer, MongoClient client) {
configurer.configureEmbeddedEventStore(configuration -> storageEngine(client)).eventProcessing(conf -> {
conf.registerTokenStore(configuration -> tokenStore(client, configuration.serializer()));
});
}
@Bean
public EventStorageEngine storageEngine(MongoClient client) {
return MongoEventStorageEngine.builder().mongoTemplate(DefaultMongoTemplate.builder().mongoDatabase(client).build()).build();
}
@Bean
public TokenStore tokenStore(MongoClient client, Serializer serializer) {
return MongoTokenStore.builder().mongoTemplate(DefaultMongoTemplate.builder().mongoDatabase(client).build()).serializer(serializer).build();
}
@Bean
public SnapshotTriggerDefinition snapshotTriggerDefinition(org.axonframework.config.Configuration configuration) {
return new EventCountSnapshotTriggerDefinition(configuration.snapshotter(), 5);
}
@Bean
public Configurer configurer() {
return DefaultConfigurer.defaultConfiguration();
}
}
As was mentioned in the comments on the original post, it was a configuration issue. So I went ahead and dug a little more and found a GitHub repository with an example: https://github.com/AxonFramework/extension-mongo/tree/master/mongo-axon-example. I implemented the configuration file as it was implemented in that repository and it worked like a charm. Thanks to Lucas Campos and Jan Gelinski for pointing me in the right direction. I will edit the original post accordingly with my solution.

how to dynamically create multiple beans of same type then gather/autowire them

let's say that in my spring(boot) yaml config file I have a list of commands:
commands: [add,delete,copy,move]
and the corresponding class in my spring(boot) project:
public class Command {
private String name;
public Command(String name) {
this.name = name;
}
public void execute() {
System.out.println(name);
}
public String getName() {
return name;
}
}
How can I dynamically/adaptively generate the right number of command beans, then gather/autowire them in a separate class as below?
public class Menu {
@Autowired
List<Command> commands;
public void display() {
commands.forEach(cmd -> System.out.println(cmd.getName()));
}
}
Thank you very much in advance for your time and your expertise.
Regards
For dynamic bean registration, you can use ImportBeanDefinitionRegistrar.
The code will be like this:
import java.util.List;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
@Configuration
@Import(CommandsConfiguration.Registrar.class)
public class CommandsConfiguration {
static class Registrar implements ImportBeanDefinitionRegistrar {
@Override
public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
final List<String> commands = List.of("add", "delete", "copy", "move"); // in practice, read from the environment/config (see the sketch below)
for (String command : commands) {
final String beanName = command + "Command";
final BeanDefinition beanDefinition = BeanDefinitionBuilder
.genericBeanDefinition(Command.class, () -> new Command(command))
.getBeanDefinition();
registry.registerBeanDefinition(beanName, beanDefinition);
}
}
}
}
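To actually read the commands list from the YAML file, the Registrar can implement EnvironmentAware and use Spring Boot's Binder. A sketch, assuming the property key is commands as in the question:
import java.util.Collections;
import java.util.List;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.boot.context.properties.bind.Bindable;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.env.Environment;
import org.springframework.core.type.AnnotationMetadata;

static class Registrar implements ImportBeanDefinitionRegistrar, EnvironmentAware {
    private Environment environment;

    @Override
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }

    @Override
    public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
        // Bind "commands: [add,delete,copy,move]" from the config to a List<String>
        final List<String> commands = Binder.get(environment)
                .bind("commands", Bindable.listOf(String.class))
                .orElse(Collections.emptyList());
        for (String command : commands) {
            registry.registerBeanDefinition(command + "Command", BeanDefinitionBuilder
                    .genericBeanDefinition(Command.class, () -> new Command(command))
                    .getBeanDefinition());
        }
    }
}
Once the definitions are registered, the @Autowired List<Command> in Menu is populated automatically, since Spring injects all beans of a given type into a typed collection.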

not able to autowire yml config class from spring boot application [duplicate]

This question already has answers here:
Why is my Spring @Autowired field null?
(21 answers)
Closed 5 years ago.
I am not able to @Autowire a class in a Spring Boot application.
From my main class CrmDisconnectionApplication I am calling the DisconnectionCronTrigger class. In that class I am doing @Autowired for YamlConfig, but I am getting a NullPointerException.
Below is the code:
CrmDisconnectionApplication
package com.wpits.crm.disconnection;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import com.wpits.crm.disconnection.quartzJob.DisconnectionCronTrigger;
@SpringBootApplication(scanBasePackages = { "com.wpits.crm" })
public class CrmDisconnectionApplication {
public static void main(String[] args) {
SpringApplication.run(CrmDisconnectionApplication.class, args);
DisconnectionCronTrigger disconnectionCronTrigger = DisconnectionCronTrigger.getInstance();
disconnectionCronTrigger.initialize();
}
}
DisconnectionCronTrigger
package com.wpits.crm.disconnection.quartzJob;
import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.wpits.crm.disconnection.config.YamlConfig;
@Component
public class DisconnectionCronTrigger {
@Autowired
private YamlConfig myConfig;
private static DisconnectionCronTrigger obj = null;
private DisconnectionCronTrigger() {}
public static DisconnectionCronTrigger getInstance() {
if(obj == null) {
obj = new DisconnectionCronTrigger();
}
return obj;
}
public void initialize() {
System.out.println("using environment: " + myConfig.getEnvironment());
System.out.println("name: " + myConfig.getName());
System.out.println("servers: " + myConfig.getServers());
System.out.println("hobies: "+myConfig.getHobies());
JobDetail job = JobBuilder.newJob(DisconnectionJob.class).withIdentity("DisconnectionJob", "group1").build();
Trigger trigger = TriggerBuilder.newTrigger().withIdentity("cronTrigger", "group1").withSchedule(CronScheduleBuilder.cronSchedule("0/10 * * * * ?")).build();
try {
Scheduler scheduler = new StdSchedulerFactory().getScheduler();
scheduler.start();
scheduler.scheduleJob(job, trigger);
}catch(Exception ex) {
ex.printStackTrace();
}
}
}
YamlConfig
package com.wpits.crm.disconnection.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import java.util.*;
@Configuration
@EnableConfigurationProperties
@ConfigurationProperties
public class YamlConfig {
private String name;
private String environment;
private List<String> servers = new ArrayList<>();
private List<String> hobies = new ArrayList<>();
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEnvironment() {
return environment;
}
public void setEnvironment(String environment) {
this.environment = environment;
}
public List<String> getServers() {
return servers;
}
public void setServers(List<String> servers) {
this.servers = servers;
}
public List<String> getHobies() {
return hobies;
}
public void setHobies(List<String> hobies) {
this.hobies = hobies;
}
}
I am getting the NullPointerException at the line System.out.println("using environment: " + myConfig.getEnvironment()); in class DisconnectionCronTrigger. Where am I going wrong? Please correct me.
The problem is this line:
DisconnectionCronTrigger disconnectionCronTrigger = DisconnectionCronTrigger.getInstance();
In getInstance you are creating a new object using new. You should not use new; instead, autowire the bean or get it from the Spring application context:
public static void main(String[] args) {
ConfigurableApplicationContext context = SpringApplication.run(CrmDisconnectionApplication.class, args);
DisconnectionCronTrigger disconnectionCronTrigger = (DisconnectionCronTrigger)context.getBean("disconnectionCronTrigger");
disconnectionCronTrigger.initialize();
}
If you do it like this, you will get an object with all the fields in the bean autowired. If you create an object using new, you won't.
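A type-safe variant avoids the string bean name and the cast. A small sketch:
public static void main(String[] args) {
    ConfigurableApplicationContext context = SpringApplication.run(CrmDisconnectionApplication.class, args);
    // Look the bean up by type instead of by name
    DisconnectionCronTrigger disconnectionCronTrigger = context.getBean(DisconnectionCronTrigger.class);
    disconnectionCronTrigger.initialize();
}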

Query is always execute before than AOP in SpringBoot and MyBatis application for dynamic datasource

Here I want to make a Spring Boot and MyBatis application use a dynamic datasource via AOP, but the aspect always executes after the query against the database, so switching the datasource has no effect because the select has already finished.
All my code is in https://github.com/helloworlde/SpringBoot-DynamicDataSource/tree/aspect_dao
My dependence is
compile('org.mybatis.spring.boot:mybatis-spring-boot-starter:1.3.1')
compile('org.springframework.boot:spring-boot-starter-web')
compile('org.springframework.boot:spring-boot-starter-aop')
runtime('mysql:mysql-connector-java')
And application.properties
application.server.db.master.driver-class-name=com.mysql.jdbc.Driver
application.server.db.master.url=jdbc:mysql://localhost/redisapi?useSSL=false
application.server.db.master.port=3306
application.server.db.master.username=root
application.server.db.master.password=ihaveapen*^##
#application.server.db.master.database=123456
#
## application common config
application.server.db.slave.driver-class-name=com.mysql.jdbc.Driver
application.server.db.slave.url=jdbc:mysql://localhost/redisapi2?useSSL=false
application.server.db.slave.port=3306
application.server.db.slave.username=root
application.server.db.slave.password=123456
#application.server.db.slave.database=redisapi
mybatis.type-aliases-package=cn.com.hellowood.dynamicdatasource.mapper
mybatis.mapper-locations=mappers/**Mapper.xml
Table
CREATE TABLE product(
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(50) NOT NULL,
price DOUBLE(10,2) NOT NULL DEFAULT 0
);
DataSourceConfigurer.java
package cn.com.hellowood.dynamicdatasource.configuration;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class DataSourceConfigurer {
@Bean("master")
@Primary
@ConfigurationProperties(prefix = "application.server.db.master")
public DataSource master() {
return DataSourceBuilder.create().build();
}
#Bean("slave")
#ConfigurationProperties(prefix = "application.server.db.slave")
public DataSource slave() {
return DataSourceBuilder.create().build();
}
#Bean("dynamicDataSource")
public DataSource dynamicDataSource() {
DynamicRoutingDataSource dynamicRoutingDataSource = new DynamicRoutingDataSource();
Map<Object, Object> dataSourceMap = new HashMap<>(2);
dataSourceMap.put("master", master());
dataSourceMap.put("slave", slave());
// Set master datasource as default
dynamicRoutingDataSource.setDefaultTargetDataSource(master());
// Set master and slave datasource as target datasource
dynamicRoutingDataSource.setTargetDataSources(dataSourceMap);
// Put the datasource keys into DynamicDataSourceContextHolder so we can check whether a datasource exists
DynamicDataSourceContextHolder.dataSourceKeys.addAll(dataSourceMap.keySet());
return dynamicRoutingDataSource;
}
@Bean
@ConfigurationProperties(prefix = "mybatis")
public SqlSessionFactoryBean sqlSessionFactoryBean() {
SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
// This is very important: without it, the datasource cannot be switched.
// Set the routing datasource on the SqlSessionFactoryBean so the SqlSessionFactory is auto-configured with it.
sqlSessionFactoryBean.setDataSource(dynamicDataSource());
return sqlSessionFactoryBean;
}
@Bean
public PlatformTransactionManager transactionManager() {
return new DataSourceTransactionManager(dynamicDataSource());
}
}
DynamicRoutingDataSource.java
package cn.com.hellowood.dynamicdatasource.configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynamicRoutingDataSource extends AbstractRoutingDataSource {
private final Logger logger = LoggerFactory.getLogger(getClass());
@Override
protected Object determineCurrentLookupKey() {
logger.info("Current DataSource is [{}]", DynamicDataSourceContextHolder.getDataSourceKey());
return DynamicDataSourceContextHolder.getDataSourceKey();
}
}
DynamicDataSourceContextHolder.java
package cn.com.hellowood.dynamicdatasource.configuration;
import java.util.ArrayList;
import java.util.List;
public class DynamicDataSourceContextHolder {
private static final ThreadLocal<String> contextHolder = new ThreadLocal<String>() {
@Override
protected String initialValue() {
return "master";
}
};
public static List<Object> dataSourceKeys = new ArrayList<>();
public static void setDataSourceKey(String key) {
contextHolder.set(key);
}
public static String getDataSourceKey() {
return contextHolder.get();
}
public static void clearDataSourceKey() {
contextHolder.remove();
}
public static boolean containDataSourceKey(String key) {
return dataSourceKeys.contains(key);
}
}
DynamicDataSourceAspect.java
package cn.com.hellowood.dynamicdatasource.configuration;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
@Aspect
@Order(-100) // to ensure it executes before @Transactional
@Component
public class DynamicDataSourceAspect {
private static final Logger logger = LoggerFactory.getLogger(DynamicDataSourceAspect.class);
private final String QUERY_PREFIX = "select";
@Pointcut("execution( * cn.com.hellowood.dynamicdatasource.mapper.*.*(..))")
public void daoAspect() {
}
@Before("daoAspect()")
public void switchDataSource(JoinPoint point) {
if (point.getSignature().getName().startsWith(QUERY_PREFIX)) {
DynamicDataSourceContextHolder.setDataSourceKey("slave");
logger.info("Switch DataSource to [{}] in Method [{}]",
DynamicDataSourceContextHolder.getDataSourceKey(), point.getSignature());
}
}
@After("daoAspect()")
public void restoreDataSource(JoinPoint point) {
DynamicDataSourceContextHolder.clearDataSourceKey();
logger.info("Restore DataSource to [{}] in Method [{}]",
DynamicDataSourceContextHolder.getDataSourceKey(), point.getSignature());
}
}
I have a Controller, Service and DAO for the query, but although I set the aspect order to -100, the query still executes before the AOP advice. Could anyone find what is wrong? Thank you very much.
Finally I fixed this issue. Because I injected a DataSourceTransactionManager bean, a transaction was opened in the Service layer, so the DAO aspect did not take effect until the transaction finished.
Delete this code:
@Bean
public PlatformTransactionManager transactionManager() {
return new DataSourceTransactionManager(dynamicDataSource());
}
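If transactions are still needed, an alternative to deleting the transaction manager is to switch the key before the transaction is opened, e.g. by pointcutting the (transactional) service layer instead of the DAO layer. A sketch, assuming a service package cn.com.hellowood.dynamicdatasource.service and illustrative method naming:
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

@Aspect
@Order(Ordered.HIGHEST_PRECEDENCE) // run before the transaction interceptor binds a connection
@Component
public class ServiceLayerDataSourceAspect {

    // Switch to the slave before entering a read-only service method, so the routing
    // key is already set when DataSourceTransactionManager obtains the connection.
    @Before("execution(* cn.com.hellowood.dynamicdatasource.service.*.get*(..))")
    public void useSlave() {
        DynamicDataSourceContextHolder.setDataSourceKey("slave");
    }
}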

Spring mongo soft delete

I need to implement soft-delete functionality (maintain a boolean field in the collection and filter all queries based on it).
The link below has a solution for Hibernate only:
Handling soft-deletes with Spring JPA
Since my application is very old, I don't want to change each existing query. I am looking for a one-place change in the Spring Data classes.
Spring Data Mongo version: 1.5.0.RELEASE
Add a Boolean field active to every class that is mapped to a collection, and set it to true for all valid documents and false for invalid ones:
private Boolean active = Boolean.TRUE;
Then change your query to:
Long countByActiveTrueAndAccountStatusNot(AccountStatus status);
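Other derived query methods can filter on the same flag. For example (a sketch; the repository and entity names are illustrative):
public interface AccountRepository extends MongoRepository<Account, String> {
    // only documents whose active flag is true are returned
    List<Account> findByActiveTrue();
}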
First step: override the default methods such as findAll(), findById(), exists()... For this you should override MongoTemplate (it's simple).
Add the field deletedAt to your entities:
@Document("costAreas")
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE)
@Builder
public class User{
@Id
String id;
String name;
LocalDateTime deletedAt;
}
PS: The field "deletedAt" contains the date of deletion (if this field is null then the document wasn't deleted).
Create CustomMongoTemplate:
import com.mongodb.client.MongoClient;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Objects;
public class CustomMongoTemplate extends MongoTemplate {
public CustomMongoTemplate(MongoTemplate mongoTemplate) {
super(mongoTemplate.getMongoDatabaseFactory());
}
public CustomMongoTemplate(MongoClient mongoClient, String databaseName) {
super(mongoClient, databaseName);
}
public CustomMongoTemplate(MongoDatabaseFactory mongoDbFactory) {
super(mongoDbFactory);
}
public CustomMongoTemplate(MongoDatabaseFactory mongoDbFactory, MongoConverter mongoConverter) {
super(mongoDbFactory, mongoConverter);
}
@Override
public <T> List<T> find(Query query, Class<T> entityClass, String collectionName) {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(collectionName, "CollectionName must not be null!");
Assert.notNull(entityClass, "EntityClass must not be null!");
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.find(query, entityClass, collectionName);
}
@Nullable
@Override
public <T> T findById(Object id, Class<T> entityClass, String collectionName) {
T t = super.findById(id, entityClass, collectionName);
if (t == null) {
return null;
}
try {
Field field = entityClass.getDeclaredField("deletedAt");
field.setAccessible(Boolean.TRUE);
// treat soft-deleted documents as absent
if (Objects.nonNull(field.get(t))) {
return null;
}
} catch (NoSuchFieldException | IllegalAccessException ignored) {
}
return t;
}
@Nullable
@Override
public <T> T findOne(Query query, Class<T> entityClass, String collectionName) {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(entityClass, "EntityClass must not be null!");
Assert.notNull(collectionName, "CollectionName must not be null!");
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.findOne(query, entityClass, collectionName);
}
@Override
@SuppressWarnings("ConstantConditions")
public boolean exists(Query query, @Nullable Class<?> entityClass, String collectionName) {
if (query == null) {
throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null");
}
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.exists(query, entityClass, collectionName);
}
// You could also override the delete() method, but I decided not to do this.
// Other methods may need the same treatment (count, findAndModify, etc.), depending on which methods you are going to use.
}
Then create Bean in configuration class:
@Configuration
public class MyConfiguration {
//...
@Bean(name = "mongoTemplate")
CustomMongoTemplate customMongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
return new CustomMongoTemplate(databaseFactory, converter);
}
//...
}
Then allow Spring to override the default MongoTemplate bean by adding the following to your application.properties file:
spring.main.allow-bean-definition-overriding=true
Replace delete() with setting deletedAt:
// Deletion method
// Before
User user = userRepository.findById(id);
userRepository.delete(user);
// Now
User user = userRepository.findById(id);
user.setDeletedAt(LocalDateTime.now());
userRepository.save(user);
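To avoid repeating the two-step soft delete at every call site, it can be wrapped in a repository default method. A sketch, assuming a UserRepository for the entity above:
import java.time.LocalDateTime;
import org.springframework.data.mongodb.repository.MongoRepository;

public interface UserRepository extends MongoRepository<User, String> {

    // Hypothetical helper: marks the document as deleted instead of removing it
    default void softDelete(User user) {
        user.setDeletedAt(LocalDateTime.now());
        save(user);
    }
}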
Second step: implement soft delete for the repository query methods generated by Spring Data, such as findAllByEmail(String email) or existsByNameAndUsername(String name, String username).
Resource: https://blog.rpuch.com/2019/10/27/spring-data-mongo-soft-delete-repositories.html
SoftDeleteMongoQueryLookupStrategy
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor;
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.NamedQueries;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import java.lang.reflect.Method;
public class SoftDeleteMongoQueryLookupStrategy implements QueryLookupStrategy {
private final QueryLookupStrategy strategy;
private final MongoOperations mongoOperations;
private final QueryMethodEvaluationContextProvider evaluationContextProvider;
public SoftDeleteMongoQueryLookupStrategy(QueryLookupStrategy strategy,
MongoOperations mongoOperations,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
this.strategy = strategy;
this.mongoOperations = mongoOperations;
this.evaluationContextProvider = evaluationContextProvider;
}
@Override
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
NamedQueries namedQueries) {
RepositoryQuery repositoryQuery = strategy.resolveQuery(method, metadata, factory, namedQueries);
// revert to the standard behavior if requested
if (method.getAnnotation(SeesSoftlyDeletedRecords.class) != null) {
return repositoryQuery;
}
if (!(repositoryQuery instanceof PartTreeMongoQuery)) {
return repositoryQuery;
}
PartTreeMongoQuery partTreeQuery = (PartTreeMongoQuery) repositoryQuery;
return new SoftDeletePartTreeMongoQuery(partTreeQuery);
}
private Criteria notDeleted() {
return new Criteria().andOperator(
Criteria.where("deletedAt").exists(false)
);
}
private class SoftDeletePartTreeMongoQuery extends PartTreeMongoQuery {
SoftDeletePartTreeMongoQuery(PartTreeMongoQuery partTreeQuery) {
super(partTreeQuery.getQueryMethod(), mongoOperations, new SpelExpressionParser(), evaluationContextProvider);
}
@Override
protected Query createQuery(ConvertingParameterAccessor accessor) {
Query query = super.createQuery(accessor);
return withNotDeleted(query);
}
@Override
protected Query createCountQuery(ConvertingParameterAccessor accessor) {
Query query = super.createCountQuery(accessor);
return withNotDeleted(query);
}
private Query withNotDeleted(Query query) {
return query.addCriteria(notDeleted());
}
}
}
SeesSoftlyDeletedRecords (if a method is marked with this annotation, it will ignore soft deletion and also see soft-deleted documents):
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface SeesSoftlyDeletedRecords {
}
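Usage on a repository method — a sketch, assuming the UserRepository above:
public interface UserRepository extends MongoRepository<User, String> {

    // Because of the annotation, this query also sees soft-deleted documents
    @SeesSoftlyDeletedRecords
    List<User> findByName(String name);
}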
SoftDeleteMongoRepositoryFactory
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import java.util.Optional;
public class SoftDeleteMongoRepositoryFactory extends MongoRepositoryFactory {
private final MongoOperations mongoOperations;
public SoftDeleteMongoRepositoryFactory(MongoOperations mongoOperations) {
super(mongoOperations);
this.mongoOperations = mongoOperations;
}
@Override
protected Optional<QueryLookupStrategy> getQueryLookupStrategy(QueryLookupStrategy.Key key,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
Optional<QueryLookupStrategy> optStrategy = super.getQueryLookupStrategy(key,
evaluationContextProvider);
return Optional.of(createSoftDeleteQueryLookupStrategy(optStrategy.get(), evaluationContextProvider));
}
private SoftDeleteMongoQueryLookupStrategy createSoftDeleteQueryLookupStrategy(QueryLookupStrategy strategy,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
return new SoftDeleteMongoQueryLookupStrategy(strategy, mongoOperations, evaluationContextProvider);
}
}
SoftDeleteMongoRepositoryFactoryBean
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
import java.io.Serializable;
public class SoftDeleteMongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID extends Serializable>
extends MongoRepositoryFactoryBean<T, S, ID> {
public SoftDeleteMongoRepositoryFactoryBean(Class<? extends T> repositoryInterface) {
super(repositoryInterface);
}
@Override
protected RepositoryFactorySupport getFactoryInstance(MongoOperations operations) {
return new SoftDeleteMongoRepositoryFactory(operations);
}
}
Add it to the configuration:
@Configuration
@EnableMongoRepositories(basePackages = {"path to package with your repositories"}, repositoryFactoryBeanClass = SoftDeleteMongoRepositoryFactoryBean.class)
public class MyConfiguration {
//...
}
Hope it will help someone)
