I have the following DateTimeTypeHandler class:
//Copyright 2012 Lucas Libraro
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.common.mybatis.typehandler;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.MappedTypes;
import org.apache.ibatis.type.TypeHandler;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.sql.*;
@MappedTypes(DateTime.class)
public class DateTimeTypeHandler implements TypeHandler {
/* (non-Javadoc)
* @see org.apache.ibatis.type.TypeHandler#setParameter(java.sql.PreparedStatement, int, java.lang.Object, org.apache.ibatis.type.JdbcType)
*/
public void setParameter(PreparedStatement ps, int i, Object parameter, JdbcType jdbcType) throws SQLException {
if (parameter != null) {
ps.setTimestamp(i, new Timestamp(((DateTime) parameter).getMillis()));
} else {
ps.setTimestamp(i, null);
}
}
/* (non-Javadoc)
* @see org.apache.ibatis.type.TypeHandler#getResult(java.sql.ResultSet, java.lang.String)
*/
public Object getResult(ResultSet rs, String columnName) throws SQLException {
Timestamp ts = rs.getTimestamp(columnName);
if (ts != null) {
return new DateTime(ts.getTime(), DateTimeZone.UTC);
} else {
return null;
}
}
/* (non-Javadoc)
* @see org.apache.ibatis.type.TypeHandler#getResult(java.sql.CallableStatement, int)
*/
public Object getResult(CallableStatement cs, int columnIndex) throws SQLException {
Timestamp ts = cs.getTimestamp(columnIndex);
if (ts != null) {
return new DateTime(ts.getTime(), DateTimeZone.UTC);
} else {
return null;
}
}
/* (non-Javadoc)
* @see org.apache.ibatis.type.TypeHandler#getResult(java.sql.ResultSet, int)
*/
public Object getResult(ResultSet rs, int columnIndex) throws SQLException {
Timestamp ts = rs.getTimestamp(columnIndex);
if (ts != null) {
return new DateTime(ts.getTime(), DateTimeZone.UTC);
} else {
return null;
}
}
}
I am able to serialize Java to JSON and JSON to Java using the following beans:
DataBean
package com.common.config;
import com.common.business.user.Employee;
import com.common.config.yaml.DatabaseConfig;
import com.common.mybatis.typehandler.DateTimeTypeHandler;
import com.commonTools.model.DBObject;
import com.commonTools.model.Snoop;
import com.commonTools.model.SnoopDBObject;
import org.apache.ibatis.type.TypeHandler;
import org.joda.time.DateTime;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;
@Configuration
@Import(DatabaseConfig.class)
@MapperScan("com.persistence")
public class DataBean {
@Autowired DatabaseConfig _databaseConfig;
@Bean
public DateTimeTypeHandler dateTimeTypeHandler(){
return new DateTimeTypeHandler();
}
@Bean
public SimpleDriverDataSource dataSource() {
SimpleDriverDataSource dataSource = new SimpleDriverDataSource();
dataSource.setDriverClass(org.mariadb.jdbc.Driver.class);
dataSource.setUsername(_databaseConfig.getUsername());
dataSource.setPassword(_databaseConfig.getPassword());
dataSource.setUrl(_databaseConfig.getUrl());
return dataSource;
}
@Bean
public DataSourceTransactionManager transactionManager() {
return new DataSourceTransactionManager(dataSource());
}
@Bean
public SqlSessionFactoryBean sqlSessionFactory() throws Exception {
SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
sessionFactory.setDataSource(dataSource());
sessionFactory.setTypeHandlers(new TypeHandler[] {
new DateTimeTypeHandler(),
});
sessionFactory.setTypeAliases(new Class[] {
DateTime.class,
Employee.class,
DBObject.class,
Snoop.class,
SnoopDBObject.class
});
return sessionFactory;
}
}
and JacksonBean
package com.common.config;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.joda.JodaModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class JacksonBean {
@Bean
public ObjectMapper objectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(MapperFeature.DEFAULT_VIEW_INCLUSION, true);
// # TEST 0 (TODO: see result when activated using JodaTime)
mapper.registerModule(new JodaModule());
mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS );
// mapper.setDateFormat(new SimpleDateFormat("EEE MMM dd yyyy HH:mm:ss 'GMT'ZZZ (z)"));
// # TEST 1 (Res: fail sometimes when too much depth)
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); // true default
// # TEST 2 (Res: too much depth)
mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY)
.setVisibility(PropertyAccessor.CREATOR, JsonAutoDetect.Visibility.ANY)
.setVisibility(PropertyAccessor.SETTER, JsonAutoDetect.Visibility.NONE)
.setVisibility(PropertyAccessor.GETTER, JsonAutoDetect.Visibility.NONE)
.setVisibility(PropertyAccessor.IS_GETTER, JsonAutoDetect.Visibility.NONE);
return mapper;
}
}
This works well: I am able to serialize the DateTime object, but it does not work when I execute the following EmployeeServiceTest:
package com.common.service.user;
import com.common.business.user.Employee;
import com.common.config.DataBean;
import com.commonTools.SessionIdentifierGenerator;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.ConfigFileApplicationContextInitializer;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.util.List;
import static org.junit.Assert.*;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(initializers = ConfigFileApplicationContextInitializer.class, classes = {DataBean.class, EmployeeService.class})
public class EmployeeServiceTest implements EmployeeServiceDAO {
private static final String SAMPLE_NAME = "JUnitTestUser_";
@Autowired
private EmployeeService _employeeService;
private String getRandomName(){
return SAMPLE_NAME + new SessionIdentifierGenerator().nextSessionId();
}
@Test
public void test_getAll(){
List<Employee> employeeList = this.getAll();
Employee testEmployee = new Employee();
testEmployee.setFirstName(getRandomName());
testEmployee.setLastName(getRandomName());
this.save(testEmployee);
assertNotNull("should have an id", testEmployee.getId());
List<Employee> employeeListIncremented = this.getAll();
assertEquals("Should have +1 in list", employeeList.size() + 1 , employeeListIncremented.size());
this.removeById(testEmployee.getId());
assertTrue("should be deleted", this.getById(testEmployee.getId()) == null);
}
@Test
public void test_getById(){
Employee testEmployee = new Employee();
testEmployee.setFirstName(getRandomName());
testEmployee.setLastName(getRandomName());
this.save(testEmployee);
assertNotNull("should have an id", testEmployee.getId());
Employee employeeById = this.getById(testEmployee.getId());
assertEquals("should be equal id", employeeById.getId(), testEmployee.getId());
this.removeById(testEmployee.getId());
assertTrue("should be deleted", this.getById(testEmployee.getId()) == null);
}
@Test
public void test_getFromFirstName(){
Employee testEmployee = new Employee();
testEmployee.setFirstName(getRandomName());
testEmployee.setLastName(getRandomName());
this.save(testEmployee);
assertNotNull("should have an id", testEmployee.getId());
Employee employeeFromFirstName = this.getFromFirstName(testEmployee.getFirstName());
assertEquals("should be equal id", employeeFromFirstName.getId(), testEmployee.getId());
this.removeById(testEmployee.getId());
assertTrue("should be deleted", this.getById(testEmployee.getId()) == null);
}
@Test
public void test_save() {
Employee testEmployee = new Employee();
testEmployee.setFirstName(getRandomName());
testEmployee.setLastName(getRandomName());
this.save(testEmployee);
assertNotNull("should have an id", testEmployee.getId());
// assertTrue("should have a creation datetime", null != testEmployee.getCreation().getDateTime() );
String firstNameTmp = getRandomName();
testEmployee.setFirstName(firstNameTmp);
this.save(testEmployee);
Employee editedEmployee = this.getById(testEmployee.getId());
assertEquals("should have first name \""+firstNameTmp+"\"", firstNameTmp, editedEmployee.getFirstName());
this.removeById(testEmployee.getId());
assertTrue("should be deleted", this.getById(testEmployee.getId()) == null);
}
@Test
public void test_removeById() {
test_save();
}
public List<Employee> getAll() {
return _employeeService.getAll();
}
public Employee getById(Integer id) {
return _employeeService.getById(id);
}
public Employee getFromFirstName(String firstName) {
return _employeeService.getFromFirstName(firstName);
}
public Employee save(Employee employee) {
_employeeService.save(employee);
return employee;
}
public void removeById(Integer id) {
_employeeService.removeById(id);
}
}
These questions come to my mind:
It's not very handy to serialize a @Configuration class like DatabaseConfig, because Spring also includes its own properties in the serialization chain.
The unit test fails because of // assertTrue("should have a creation datetime", null != testEmployee.getCreation().getDateTime() );: in the test, getCreation() is null while it shouldn't be. How should I initialize the Jackson to Joda DateTime mapping during the test?
I also notice the test doesn't roll back the database when it fails. I used to do unit testing in JavaScript, where the before() and after() methods handle setup and teardown around each test; is there a convenient way to achieve that here (the kind of hooks I mean is sketched after these questions)?
Besides all I have done here, is this the most appropriate way to design a RESTful API?
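To illustrate the setup/teardown hooks I am thinking of, in JUnit 4 they would look roughly like this (only a sketch, the class name is made up; the fixture logic itself is the part I am unsure about):
import org.junit.After;
import org.junit.Before;
public class EmployeeServiceTestFixtureSketch {
@Before
public void setUp() {
// runs before each @Test: insert the fixtures the test needs
}
@After
public void tearDown() {
// runs after each @Test, even when the test fails: remove the test data again
}
}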
This question might seem to be a duplicate of "Spring Batch: Writing data to multiple files with dynamic File Name", but it is not.
My requirement is to read data from the DB using JdbcPagingItemReader, process individual records with some additional processing, and in the writer create an individual JSON file for each processed item, with a file name like id_of_record_json_file.txt.
For example, if the reader reads 100 records then 100 JSON files have to be created.
What is the best way to do this? Can we use Spring Batch for this?
Update 1:
As per @Mahmoud's answer, a tasklet can be used. I have also tried implementing a custom ItemWriter in a chunk-oriented step, and this also seems to work:
@Override
public void write(final List<? extends Person> persons) throws Exception {
for (Person person: persons) {
objectMapper.writeValue(new File("D:/cp/dataTwo.json"), person);
}
}
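For one file per record, the same writer can derive the file name from the item, for example (a sketch; it assumes Person exposes getId() and that objectMapper is an injected Jackson ObjectMapper):
@Override
public void write(final List<? extends Person> persons) throws Exception {
for (Person person : persons) {
// one JSON file per processed record, named after the record id
objectMapper.writeValue(new File("D:/cp/" + person.getId() + "_record.json"), person);
}
}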
Using a chunk-oriented step won't work out of the box, because there would be a single item writer whose resource is set upfront and stays fixed for the entire step. Using a composite item writer might work, but you would need to know how many distinct writers to create and configure upfront.
The most straightforward option I see is to use a tasklet, something like:
import java.util.Collections;
import java.util.HashMap;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.database.JdbcPagingItemReader;
import org.springframework.batch.item.database.Order;
import org.springframework.batch.item.database.builder.JdbcPagingItemReaderBuilder;
import org.springframework.batch.item.file.FlatFileItemWriter;
import org.springframework.batch.item.file.builder.FlatFileItemWriterBuilder;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
@Configuration
@EnableBatchProcessing
public class MyJob {
@Bean
public JdbcPagingItemReader<Person> itemReader() {
return new JdbcPagingItemReaderBuilder<Person>()
.name("personItemReader")
.dataSource(dataSource())
.beanRowMapper(Person.class)
.selectClause("select *")
.fromClause("from person")
.sortKeys(new HashMap<String, Order>() {{ put("id", Order.DESCENDING);}})
.build();
}
@Bean
public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) {
return jobs.get("job")
.start(steps.get("step")
.tasklet(new MyTasklet(itemReader()))
.build())
.build();
}
private static class MyTasklet implements Tasklet {
private boolean readerInitialized;
private JdbcPagingItemReader<Person> itemReader;
public MyTasklet(JdbcPagingItemReader<Person> itemReader) {
this.itemReader = itemReader;
}
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext();
if (!readerInitialized) {
itemReader.open(executionContext);
readerInitialized = true;
}
Person person = itemReader.read();
if (person == null) {
itemReader.close();
return RepeatStatus.FINISHED;
}
// process the item
process(person);
// write the item in its own file (dynamically generated at runtime)
write(person, executionContext);
// save current state in execution context: in case of restart after failure, the job would resume where it left off.
itemReader.update(executionContext);
return RepeatStatus.CONTINUABLE;
}
private void process(Person person) {
// do something with the item
}
private void write(Person person, ExecutionContext executionContext) throws Exception {
FlatFileItemWriter<Person> itemWriter = new FlatFileItemWriterBuilder<Person>()
.resource(new FileSystemResource("person" + person.getId() + ".csv"))
.name("personItemWriter")
.delimited()
.names("id", "name")
.build();
itemWriter.open(executionContext);
itemWriter.write(Collections.singletonList(person));
itemWriter.close();
}
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
@Bean
public DataSource dataSource() {
EmbeddedDatabase embeddedDatabase = new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.H2)
.addScript("/org/springframework/batch/core/schema-drop-h2.sql")
.addScript("/org/springframework/batch/core/schema-h2.sql")
.build();
JdbcTemplate jdbcTemplate = new JdbcTemplate(embeddedDatabase);
jdbcTemplate.execute("create table person (id int primary key, name varchar(20));");
for (int i = 1; i <= 10; i++) {
jdbcTemplate.execute(String.format("insert into person values (%s, 'foo%s');", i, i));
}
return embeddedDatabase;
}
static class Person {
private int id;
private String name;
public Person() {
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String toString() {
return "Person{id=" + id + ", name='" + name + '\'' + '}';
}
}
}
This example reads 10 persons from a DB table and generates 10 CSV files (person1.csv, person2.csv, etc.).
I recently started working on Spring Boot projects.
I wrote a sample program where I fetch data from the DB, modify it, and store it back in the DB.
The problem I am facing is that I am able to get the data from the DB, but when saving it back I get the exception below.
org.springframework.dao.InvalidDataAccessApiUsageException: no transaction is in progress; nested exception is
javax.persistence.TransactionRequiredException: no transaction is in progress at
org.springframework.orm.jpa.EntityManagerFactoryUtils.convertJpaAccessExceptionIfPossible(EntityManagerFactoryUtils.java:413) at
org.springframework.orm.jpa.vendor.HibernateJpaDialect.translateExceptionIfPossible(HibernateJpaDialect.java:246)
Below is my Code.
DataConfiguration.java
import java.util.Properties;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
*
* This class contains code to configure the database.
* @author satish
*
*/
@Configuration
@EntityScan(basePackages= {"com.tushar.common.model"})
@ComponentScan(basePackages= {"com.tushar.common.model"})
@EnableJpaRepositories(basePackages={"com"})
@EnableTransactionManagement
public class DataConfiguration {
#Value("${spring.datasource.driver-class-name}")
private String driverClassName;
#Value("${spring.datasource.url}")
private String url;
#Value("${spring.datasource.username}")
private String username;
#Value("${spring.datasource.password}")
private String password;
#Value("${spring.datasource.dialect}")
private String dialect;
/**
*
* It will scan the package where our entities appear.
* @param dataSource
* @return
*/
@Bean
@Autowired
public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource) {
HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
vendorAdapter.setGenerateDdl(true);
Properties jpaProperties = new Properties();
jpaProperties.setProperty("hibernate.show_sql", "true");
jpaProperties.setProperty("hibernate.dialect", dialect);
jpaProperties.setProperty("hibernate.hbm2ddl.auto", "update");
LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean =
new LocalContainerEntityManagerFactoryBean();
localContainerEntityManagerFactoryBean.setJpaVendorAdapter(vendorAdapter);
localContainerEntityManagerFactoryBean.setPackagesToScan(new String[] {"com.att.pricerd.discountmanagement.model"});
localContainerEntityManagerFactoryBean.setDataSource(dataSource);
localContainerEntityManagerFactoryBean.setJpaProperties(jpaProperties);
return localContainerEntityManagerFactoryBean;
}
/**
*
* It will set the database properties on the DataSource.
* @return
*/
@Bean
public DataSource dataSource(){
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName(driverClassName);
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
return dataSource;
}
@Bean
@Autowired
public JpaTransactionManager transactionManager(LocalContainerEntityManagerFactoryBean emf) throws Exception {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(emf.getObject());
transactionManager.setEntityManagerFactory(emf.getNativeEntityManagerFactory());
return transactionManager;
}
}
Repository
@Repository
public interface EmployeeRepository extends JpaRepository<Employee,Long> {
}
DAO Class
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class EmployeeDataManagementDaoImpl implements EmployeeDataManagementDao {
private static final Logger LOG = LoggerFactory.getLogger(EmployeeDataManagementDaoImpl.class);
@Autowired
private EmployeeDataManagmentUtil EmployeeDataManagmentUtil;
@Autowired
private SalesEmployeesRepository salesEmployeesRepository;
@Autowired
private EmployeeRepository employeeRepository;
@PersistenceContext
private EntityManager em;
@Override
public void addEmployee(EmployeeDetailsRequestVo EmployeeRequest) {
convertAndSaveSalesEmployee(EmployeeRequest);
}
/**
* Fetch data from DB, update the initial and maximum Employee and
* save it back to DB.
* @throws DataNotFoundException
*
*/
@Override
public void changeEmployee(List<Employee> Employees) throws EmployeeManagementException {
for (Employee employee : Employees) {
List<Employee> EmployeesDB;
try {
EmployeesDB = getEmployeeFromDB(employee);
} catch (DataNotFoundException e) {
List<String> errorMessage = new ArrayList<>();
errorMessage.add(e.getMessage());
LOG.error(e.getMessage(),e);
throw new EmployeeManagementException(errorMessage);
}
for (Employee employeeDB : EmployeesDB) {
if (employee.getMaxEmployee() != null) {
employeeDB.setMaxEmployee(employee.getMaxEmployee());
}
if (employee.getInitialEmployee() != null) {
employeeDB.setInitialEmployee(employee.getInitialEmployee());
}
employeeRepository.saveAndFlush(employeeDB);
}
}
}
/**
* This method is used to get the Employee details from DB.
*
* @param employee
* @return List<Employee>
* @throws DataNotFoundException
*/
private List<Employee> getEmployeeFromDB(Employee employee)
throws DataNotFoundException {
List<Employee> EmployeesDB = findByAllEmployeeFilters(employee.getEmpId(),
employee.getYearsExp(), employee.getDeptLevInd(), employee.getSalary(),
employee.getAddress(), employee.getCountryCd(), employee.getPinCode());
if (EmployeesDB.isEmpty()) {
String errCode = ""; // error code for data not found, yet to be
// decided.
LOG.error("ERROR CODE :: {}", errCode);
throw new DataNotFoundException(errCode);
}
return EmployeesDB;
}
/**
* This method will update the Employee end date
* @param Employees
*/
@Override
public void inactivateEmployee(List<Employee> Employees)
throws EmployeeManagementException {
for (Employee employee : Employees) {
List<Employee> employeesDB;
try {
employeesDB = getEmployeeFromDB(employee);
} catch (DataNotFoundException e) {
List<String> errorMessage = new ArrayList<>();
errorMessage.add(e.getMessage());
LOG.error(e.getMessage(),e);
throw new EmployeeManagementException(errorMessage);
}
for (Employee employeeDB : employeesDB) {
if (employee.getEmployeeEndDate() != null) {
employeeDB.setEmployeeEndDate(employee.getEmployeeEndDate());
}
//
employeeRepository.saveAndFlush(employeeDB);
}
}
}
/**
*
* @param empId
* @param yearsExp
* @param bigDecimal
* @param salary
* @param regionId
* @param countryCd
* @param pinCode
* @return
*/
private List<Employee> findByAllEmployeeFilters(BigDecimal empId, BigDecimal yearsExp,
BigDecimal bigDecimal, BigDecimal salary, String regionId, String countryCd, String pinCode) {
CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
CriteriaQuery<Employee> criteriaQuery = criteriaBuilder
.createQuery(Employee.class);
Root<Employee> root = criteriaQuery.from(Employee.class);
criteriaQuery.where(criteriaBuilder.equal(root.get("empId"), empId),
criteriaBuilder.equal(root.get("deptLevInd"), bigDecimal),
criteriaBuilder.equal(root.get("yearsExp"), yearsExp), criteriaBuilder.equal(root.get("salary"), salary),
criteriaBuilder.equal(root.get("address"), regionId),
criteriaBuilder.equal(root.get("countryCd"), countryCd),
criteriaBuilder.equal(root.get("pinCode"), pinCode));
return em.createQuery(criteriaQuery).getResultList();
}
}
The getEmployeeFromDB gets me the values from DB but saveAndFlush is giving me the exception.
Actually Bedla is right, you should use @Transactional. What I want to add is that the changeEmployee method should be declared in a service class. The correct design is to create an EmployeeService class, move the changeEmployee method to it, and annotate the service with @Transactional. Generally, a DAO should contain only find methods to load from the DB and update/save methods to persist.
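For example, something along these lines (a rough sketch that reuses the names from the question; the package and wiring details are assumptions):
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional
public class EmployeeService {
@Autowired
private EmployeeRepository employeeRepository;
// changeEmployee moved here from EmployeeDataManagementDaoImpl;
// the whole loop now runs inside a single Spring-managed transaction
public void changeEmployee(List<Employee> employees) throws EmployeeManagementException {
// ... same lookup/update logic as before, ending with employeeRepository.saveAndFlush(...)
}
}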
I want all form string fields to be trimmed automatically (trailing and leading spaces only).
Suppose I pass FirstName = " robert "
Expected: "robert"
The controller class has the following code:
@InitBinder
public void initBinder ( WebDataBinder binder )
{
StringTrimmerEditor stringtrimmer = new StringTrimmerEditor(true);
binder.registerCustomEditor(String.class, stringtrimmer);
}
@RequestMapping(value = "/createuser", method = RequestMethod.POST)
public Boolean createUser(@RequestBody UserAddUpdateParam userAddUpdateParam) throws Exception {
return userFacade.createUser(userAddUpdateParam);
}
When I debug the code, it comes into @InitBinder but the bean class string fields are not trimmed.
The annotation @InitBinder doesn't work with @RequestBody; you have to use it with the @ModelAttribute annotation.
You can find more information in the Spring documentation:
https://docs.spring.io/spring/docs/current/spring-framework-reference/html/mvc.html
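For example, the handler from the question would become something like this (a sketch; it assumes the fields are sent as request parameters that bind onto UserAddUpdateParam, not as a JSON body):
@RequestMapping(value = "/createuser", method = RequestMethod.POST)
public Boolean createUser(@ModelAttribute UserAddUpdateParam userAddUpdateParam) throws Exception {
// the StringTrimmerEditor registered in @InitBinder is applied during data binding here
return userFacade.createUser(userAddUpdateParam);
}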
To add this feature to all JSON submitted in a POST and received in a @RequestBody, you can have the following WebMvcConfigurer in place.
import java.util.List;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
@Configuration
public class HttpMessageConvertor implements WebMvcConfigurer {
@Override
public void extendMessageConverters(List<HttpMessageConverter<?>> converters) {
converters.add(mappingJackson2HttpMessageConverter());
}
@Bean
public MappingJackson2HttpMessageConverter mappingJackson2HttpMessageConverter() {
MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
SimpleModule module = new SimpleModule();
module.addDeserializer(String.class, new StringWithoutSpaceDeserializer(String.class));
mapper.registerModule(module);
converter.setObjectMapper(mapper);
return converter;
}
}
The StringWithoutSpaceDeserializer class:
import java.io.IOException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
public class StringWithoutSpaceDeserializer extends StdDeserializer<String> {
/**
*
*/
private static final long serialVersionUID = -6972065572263950443L;
protected StringWithoutSpaceDeserializer(Class<String> vc) {
super(vc);
}
@Override
public String deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
return p.getText() != null ? p.getText().trim() : null;
}
}
Try out the below code:
@InitBinder
public void setAllowedFields(WebDataBinder dataBinder) {
dataBinder.registerCustomEditor(String.class, new PropertyEditorSupport() {
@Override
public void setAsText(String text) {
if (text == null) {
return;
}
setValue(text.trim());
}
@Override
public String getAsText() {
Object value = getValue();
return (value != null ? value.toString().trim() : "");
}
});
}
I'm fairly new to Spring (the Neo4j side), and I am having trouble getting my repository @Autowired.
This is my repository:
package org.jarivm.relationGraph.objects.repositories;
import org.jarivm.relationGraph.objects.domains.Employee;
import org.jarivm.relationGraph.objects.domains.Project;
import org.springframework.data.neo4j.annotation.Query;
import org.springframework.data.neo4j.repository.GraphRepository;
public interface EmployeeRepository extends GraphRepository<Employee> {
@Query("MATCH a=(:Employee)-[:WORKED_ON]->(p:Project) WHERE id(p)={0} RETURN a")
Iterable<Employee> getTeamMates(Project client);
}
my test class:
package org.jarivm.relationGraph;
import org.apache.commons.collections4.set.ListOrderedSet;
import org.jarivm.relationGraph.objects.domains.Employee;
import org.jarivm.relationGraph.objects.domains.Project;
import org.jarivm.relationGraph.objects.repositories.EmployeeRepository;
import org.jarivm.relationGraph.utilities.NodeProperties;
import org.junit.After;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Iterator;
/**
* @author Jari Van Melckebeke
* @since 02.09.16
*/
@FixMethodOrder(MethodSorters.JVM)
public class Tests extends Application {
@Autowired
private Facade facade;
@Autowired
private EmployeeRepository employeeRepository;
@Before
public void setUp() throws Exception {
facade = new Facade();
}
@After
public void tearDown() throws Exception {
facade.tearDown();
}
/*
@Test
public void persistedEmployeeShouldBeRetrievableFromGraphDB() {
Employee employee = new Employee("john", "adams");
//System.out.println(session.getTransaction().status());
if (!facade.findEmployeeByProperty("name", employee.getName()).iterator().hasNext()) {
facade.commit(employee);
Employee foundHim = facade.findEmployeeByProperty("name", employee.getName()).iterator().next();
assert foundHim.getId().equals(employee.getId());
assert foundHim.getName().equals(employee.getName());
}
}
@Test
public void persistedChainShouldBeRetrievableFromGraphDB() {
Employee employee = new Employee("john", "myles");
Client client = new Client();
Sector sector = new Sector();
Project project = new Project();
client.setName("Real Dolmen");
project.setClient(client);
project.setCost(100.0);
project.setName("project highrise");
Set<Employee> set = new ListOrderedSet<Employee>();
set.add(employee);
project.setTeam(set);
sector.setName("game");
client.setSector(sector);
facade.commit(sector);
facade.commit(employee);
facade.commit(client);
facade.commit(project);
Client foundHim = facade.findClientByProperty("name", client.getName()).iterator().next();
assert foundHim.getId().equals(client.getId());
assert foundHim.getName().equals(client.getName());
}
@Test
public void projectShouldBeInsertableAlone() {
Project project = new Project();
project.setName("random");
project.setLanguage("Java");
facade.commit(project);
Project foundHim = facade.findProjectByProperty("name", project.getName()).iterator().next();
assert foundHim.getId().equals(project.getId());
}
@Test
public void clientShouldBeInsertableAlone() {
Client client = new Client();
client.setName("Colruyt");
facade.commit(client);
Client foundHim = facade.findClientByProperty("name", client.getName()).iterator().next();
assert foundHim.getId().equals(client.getId());
}*/
@Test
public void createdNodesShoudBeEditable() {
Iterator<Employee> employees = facade.findEmployeeByProperty("name", "john").iterator();
Project project = facade.findProjectByProperty("name", "random").iterator().next();
while (employees.hasNext()) {
Employee e = employees.next();
if (project.getTeam() == null)
project.setTeam(new ListOrderedSet<Employee>());
project.getTeam().add(e);
}
facade.commit(project);
}
@Test
public void teamMatesShouldBeViewable() {
Project p = facade.findProjectByProperty("name", "Matsoft").iterator().next();
System.out.println(p);
System.out.println(employeeRepository);
Iterable<Employee> e = employeeRepository.getTeamMates(p);
System.out.println(e.iterator());
}
}
and my Application.java class:
package org.jarivm.relationGraph;
import org.neo4j.ogm.session.SessionFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.neo4j.config.Neo4jConfiguration;
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
* @author Jari Van Melckebeke
* @since 23.09.16
*/
@EnableTransactionManagement
@ComponentScan(basePackages = {"org.jarivm.relationGraph"})
@Configuration
@EnableNeo4jRepositories(basePackages = "org.jarivm.relationGraph.objects.repositories.EmployeeRepository")
public class Application extends Neo4jConfiguration {
public static final String URL = System.getenv("NEO4J_URL") != null ? System.getenv("NEO4J_URL") : "http://localhost:7474";
@Bean
public org.neo4j.ogm.config.Configuration getConfiguration() {
org.neo4j.ogm.config.Configuration config = new org.neo4j.ogm.config.Configuration();
config
.driverConfiguration()
.setDriverClassName("org.neo4j.ogm.drivers.http.driver.HttpDriver")
.setURI(URL)
.setCredentials("neo4j", "mypassword");
return config;
}
@Override
public SessionFactory getSessionFactory() {
return new SessionFactory(getConfiguration(), "org.jarivm.relationGraph.objects.domains");
}
}
The @Autowired has never worked with this program, so I do not know what the problem is.
Thanks in advance,
Jari Van Melckebeke
I think your Tests class should not extend Application, but should instead be annotated with @RunWith - something like (untested):
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = org.jarivm.relationGraph.Application.class, loader = AnnotationConfigContextLoader.class)
public class Tests {
For more information, see the section titled Integration Testing with @Configuration Classes:
https://spring.io/blog/2011/06/21/spring-3-1-m2-testing-with-configuration-classes-and-profiles
I need to implement soft-delete functionality (maintain a boolean field in the table and filter all queries based on it).
The link below has a solution for Hibernate only.
Handling soft-deletes with Spring JPA
Since my application is very old, I don't want to change every existing query. I am looking for a solution that requires a change in only one place in the Spring Data classes.
Spring Data MongoDB version: 1.5.0.RELEASE
Add a Boolean field active to every class that is mapped to a collection,
set it to true for all valid documents and false for invalid ones:
private Boolean active = Boolean.TRUE;
and change your query to:
Long countByActiveTrueAndAccountStatusNot(AccountStatus status);
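For example, the repository would then expose derived queries along these lines (a sketch; the entity and repository names are assumptions):
import java.util.List;
import org.springframework.data.mongodb.repository.MongoRepository;
public interface AccountRepository extends MongoRepository<Account, String> {
// derived queries only match documents whose active flag is still true
List<Account> findByActiveTrue();
Long countByActiveTrueAndAccountStatusNot(AccountStatus status);
}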
First step: override the default methods such as findAll(), findById(), exists()... For this you should override MongoTemplate (it's simple).
Add a "deletedAt" field to your entities:
#Document("costAreas")
#Getter
#Setter
#NoArgsConstructor
#AllArgsConstructor
#FieldDefaults(level = AccessLevel.PRIVATE)
#Builder
public class User{
#Id
String id;
String name;
LocalDateTime deletedAt;
}
PS: The field "deletedAt" holds the date of deletion (if this field is null, the document was not deleted).
Create CustomMongoTemplate:
import com.mongodb.client.MongoClient;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Objects;
public class CustomMongoTemplate extends MongoTemplate {
public CustomMongoTemplate(MongoTemplate mongoTemplate) {
super(mongoTemplate.getMongoDatabaseFactory());
}
public CustomMongoTemplate(MongoClient mongoClient, String databaseName) {
super(mongoClient, databaseName);
}
public CustomMongoTemplate(MongoDatabaseFactory mongoDbFactory) {
super(mongoDbFactory);
}
public CustomMongoTemplate(MongoDatabaseFactory mongoDbFactory, MongoConverter mongoConverter) {
super(mongoDbFactory, mongoConverter);
}
@Override
public <T> List<T> find(Query query, Class<T> entityClass, String collectionName) {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(collectionName, "CollectionName must not be null!");
Assert.notNull(entityClass, "EntityClass must not be null!");
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.find(query, entityClass, collectionName);
}
@Nullable
@Override
public <T> T findById(Object id, Class<T> entityClass, String collectionName) {
T t = super.findById(id, entityClass, collectionName);
try {
Field field = entityClass.getDeclaredField("deletedAt");
field.setAccessible(Boolean.TRUE);
if (Objects.nonNull(field.get(t))) {
return null;
}
} catch (NoSuchFieldException | IllegalAccessException ignored) {
}
return t;
}
@Nullable
@Override
public <T> T findOne(Query query, Class<T> entityClass, String collectionName) {
Assert.notNull(query, "Query must not be null!");
Assert.notNull(entityClass, "EntityClass must not be null!");
Assert.notNull(collectionName, "CollectionName must not be null!");
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.findOne(query, entityClass, collectionName);
}
@Override
@SuppressWarnings("ConstantConditions")
public boolean exists(Query query, @Nullable Class<?> entityClass, String collectionName) {
if (query == null) {
throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null");
}
query.addCriteria(Criteria.where("deletedAt").exists(Boolean.FALSE));
return super.exists(query, entityClass, collectionName);
}
// You can also override the delete() method, but I decided not to do that
// Other methods (count, findAndModify, etc.) may also need to be added here, depending on which ones you use
}
Then create the bean in a configuration class:
@Configuration
public class MyConfiguration {
//...
@Bean(name = "mongoTemplate")
CustomMongoTemplate customMongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
return new CustomMongoTemplate(databaseFactory, converter);
}
//...
}
And allow Spring to override the default MongoTemplate bean by adding the following to your application.properties file:
spring.main.allow-bean-definition-overriding=true
Replace delete() with setting deletedAt:
// Deletion method
// Before
User user = userRepository.findById(id);
userRepository.delete(user);
// Now
User user = userRepository.findById(id);
user.setDeletedAt(LocalDateTime.now());
userRepository.save(user);
Second step: implement soft delete for the repository methods derived from method names (such as findAllByEmail(String email), existsByNameAndUsername(String name, String username), ...).
Resource: https://blog.rpuch.com/2019/10/27/spring-data-mongo-soft-delete-repositories.html
SoftDeleteMongoQueryLookupStrategy
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor;
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.repository.core.NamedQueries;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import java.lang.reflect.Method;
public class SoftDeleteMongoQueryLookupStrategy implements QueryLookupStrategy {
private final QueryLookupStrategy strategy;
private final MongoOperations mongoOperations;
private final QueryMethodEvaluationContextProvider evaluationContextProvider;
public SoftDeleteMongoQueryLookupStrategy(QueryLookupStrategy strategy,
MongoOperations mongoOperations,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
this.strategy = strategy;
this.mongoOperations = mongoOperations;
this.evaluationContextProvider = evaluationContextProvider;
}
@Override
public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory,
NamedQueries namedQueries) {
RepositoryQuery repositoryQuery = strategy.resolveQuery(method, metadata, factory, namedQueries);
// revert to the standard behavior if requested
if (method.getAnnotation(SeesSoftlyDeletedRecords.class) != null) {
return repositoryQuery;
}
if (!(repositoryQuery instanceof PartTreeMongoQuery)) {
return repositoryQuery;
}
PartTreeMongoQuery partTreeQuery = (PartTreeMongoQuery) repositoryQuery;
return new SoftDeletePartTreeMongoQuery(partTreeQuery);
}
private Criteria notDeleted() {
return new Criteria().andOperator(
Criteria.where("deletedAt").exists(false)
);
}
private class SoftDeletePartTreeMongoQuery extends PartTreeMongoQuery {
SoftDeletePartTreeMongoQuery(PartTreeMongoQuery partTreeQuery) {
super(partTreeQuery.getQueryMethod(), mongoOperations, new SpelExpressionParser(), evaluationContextProvider);
}
@Override
protected Query createQuery(ConvertingParameterAccessor accessor) {
Query query = super.createQuery(accessor);
return withNotDeleted(query);
}
@Override
protected Query createCountQuery(ConvertingParameterAccessor accessor) {
Query query = super.createCountQuery(accessor);
return withNotDeleted(query);
}
private Query withNotDeleted(Query query) {
return query.addCriteria(notDeleted());
}
}
}
SeesSoftlyDeletedRecords (if you mark a method with this annotation, the method will ignore the soft-delete filter)
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface SeesSoftlyDeletedRecords {
}
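A repository method can then opt out of the filtering like this (a sketch; the repository and its query methods are assumptions):
import java.util.List;
import org.springframework.data.mongodb.repository.MongoRepository;
public interface UserRepository extends MongoRepository<User, String> {
// filtered: only documents without deletedAt are returned
List<User> findByName(String name);
// not filtered: soft-deleted documents are returned as well
@SeesSoftlyDeletedRecords
List<User> findAllByEmail(String email);
}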
SoftDeleteMongoRepositoryFactory
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory;
import org.springframework.data.repository.query.QueryLookupStrategy;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import java.util.Optional;
public class SoftDeleteMongoRepositoryFactory extends MongoRepositoryFactory {
private final MongoOperations mongoOperations;
public SoftDeleteMongoRepositoryFactory(MongoOperations mongoOperations) {
super(mongoOperations);
this.mongoOperations = mongoOperations;
}
@Override
protected Optional<QueryLookupStrategy> getQueryLookupStrategy(QueryLookupStrategy.Key key,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
Optional<QueryLookupStrategy> optStrategy = super.getQueryLookupStrategy(key,
evaluationContextProvider);
return Optional.of(createSoftDeleteQueryLookupStrategy(optStrategy.get(), evaluationContextProvider));
}
private SoftDeleteMongoQueryLookupStrategy createSoftDeleteQueryLookupStrategy(QueryLookupStrategy strategy,
QueryMethodEvaluationContextProvider evaluationContextProvider) {
return new SoftDeleteMongoQueryLookupStrategy(strategy, mongoOperations, evaluationContextProvider);
}
}
SoftDeleteMongoRepositoryFactoryBean
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean;
import org.springframework.data.repository.Repository;
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
import java.io.Serializable;
public class SoftDeleteMongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID extends Serializable>
extends MongoRepositoryFactoryBean<T, S, ID> {
public SoftDeleteMongoRepositoryFactoryBean(Class<? extends T> repositoryInterface) {
super(repositoryInterface);
}
@Override
protected RepositoryFactorySupport getFactoryInstance(MongoOperations operations) {
return new SoftDeleteMongoRepositoryFactory(operations);
}
}
Add it to the configuration:
@Configuration
@EnableMongoRepositories(basePackages = {"path to package with your repositories"}, repositoryFactoryBeanClass = SoftDeleteMongoRepositoryFactoryBean.class)
public class MyConfiguration {
//...
}
Hope it will help someone)