I am getting the error below when I try to run a Spring Batch job.
java.lang.IllegalStateException: Failed to execute CommandLineRunner
at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:779) ~[spring-boot-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.SpringApplication.callRunners(SpringApplication.java:760) ~[spring-boot-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.SpringApplication.afterRefresh(SpringApplication.java:747) ~[spring-boot-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:315) ~[spring-boot-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at com.amhi.care.claims.query.batch.QueryBatchApplication.main(QueryBatchApplication.java:15) [classes/:na]
Caused by: java.lang.IndexOutOfBoundsException: Index: 0, Size: 0
at java.util.ArrayList.rangeCheck(ArrayList.java:657) ~[na:1.8.0_192]
at java.util.ArrayList.get(ArrayList.java:433) ~[na:1.8.0_192]
at org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.getNextJobParameters(JobLauncherCommandLineRunner.java:143) ~[spring-boot-autoconfigure-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.execute(JobLauncherCommandLineRunner.java:212) ~[spring-boot-autoconfigure-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.executeLocalJobs(JobLauncherCommandLineRunner.java:231) ~[spring-boot-autoconfigure-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.launchJobFromProperties(JobLauncherCommandLineRunner.java:123) ~[spring-boot-autoconfigure-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherCommandLineRunner.run(JobLauncherCommandLineRunner.java:117) ~[spring-boot-autoconfigure-1.5.3.RELEASE.jar:1.5.3.RELEASE]
at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:776) ~[spring-boot-1.5.3.RELEASE.jar:1.5.3.RELEASE]
... 4 common frames omitted
Code:
@Configuration
@EnableBatchProcessing
@EnableScheduling
@EnableTransactionManagement
@ComponentScan(QueryBatchConstants.COMPONENT_SCAN_PACKAGE)
@Import(DataSourcecConfiguration.class)
public class QueryBatchConfiguration {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Bean
    public Job reminderJob() {
        return jobBuilderFactory.get("reminderJob").flow(step()).next(closureStep()).end().build();
    }

    @Bean
    public Step step() {
        return stepBuilderFactory.get("step").tasklet(tasklet()).build();
    }

    @Bean
    public Step closureStep() {
        return stepBuilderFactory.get("closureStep").tasklet(closureTasklet()).build();
    }

    @Bean
    public Tasklet tasklet() {
        return new QueryBatchTasklet();
    }

    @Bean
    public Tasklet closureTasklet() {
        return new ClosureBatchTasklet();
    }

    @Bean
    @JobScope
    public JobParameters jobParamater() {
        return new JobParametersBuilder()
                .addDate("date", new Date())
                .toJobParameters();
    }

    /**
     * This method is used to configure the Dozer Mapper
     *
     * @return Mapper
     * @throws IOException
     */
    @Bean(name = "mapper")
    public Mapper configDozerMapper() throws IOException {
        DozerBeanMapper mapper = new DozerBeanMapper();
        return mapper;
    }

    /**
     * This method is used to create RIDC client manager
     *
     * @return IdcClientManager
     */
    @Bean(name = "idcClientmanager")
    public IdcClientManager idcClientmanager() {
        return new IdcClientManager();
    }
}
@Configuration
@EnableTransactionManagement
@ComponentScan(QueryBatchConstants.COMPONENT_SCAN_PACKAGE)
@PropertySource(QueryBatchConstants.CLASSPATH_APPLICATION_PROPERTIES)
public class DataSourcecConfiguration {

    @Resource
    private Environment env;

    @Autowired
    public DataSource dataSource;

    /**
     * This method is used to configure a data source
     *
     * @return DataSource
     * @throws SQLException
     */
    @Bean
    public DataSource dataSource() throws SQLException {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(env.getRequiredProperty(QueryBatchConstants.DATABASE_DRIVER));
        dataSource.setUrl(env.getRequiredProperty(QueryBatchConstants.DATABASE_URL));
        dataSource.setUsername(env.getRequiredProperty(QueryBatchConstants.DATABASE_USERNAME));
        dataSource.setPassword(env.getRequiredProperty(QueryBatchConstants.DATABASE_PSWRD));
        return dataSource;
    }

    /**
     * This method is used to configure an entity manager factory
     *
     * @return LocalContainerEntityManagerFactoryBean
     * @throws SQLException
     */
    @Autowired
    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() throws SQLException {
        LocalContainerEntityManagerFactoryBean entityManager = new LocalContainerEntityManagerFactoryBean();
        entityManager.setDataSource(dataSource());
        entityManager.setPersistenceProviderClass(HibernatePersistence.class);
        entityManager.setPackagesToScan(env.getRequiredProperty(QueryBatchConstants.PACKAGES_TO_SCAN));
        entityManager.setJpaProperties(jpaProperties());
        return entityManager;
    }

    /**
     * This method is used to configure the JPA properties
     *
     * @return JPA Properties
     */
    private Properties jpaProperties() {
        Properties properties = new Properties();
        properties.put(QueryBatchConstants.HIBERNATE_DIALECT, env.getRequiredProperty(QueryBatchConstants.HIBERNATE_DIALECT));
        properties.put(QueryBatchConstants.HIBERNATE_SHOW_SQL, env.getRequiredProperty(QueryBatchConstants.HIBERNATE_SHOW_SQL));
        properties.put(QueryBatchConstants.HIBERNATE_JDBC_META_DATA, env.getRequiredProperty(QueryBatchConstants.HIBERNATE_FALSE));
        return properties;
    }

    /**
     * This method is used to configure the transaction manager
     *
     * @param emf
     * @return JpaTransactionManager
     */
    @Bean
    public JpaTransactionManager transactionManager(EntityManagerFactory emf) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(emf);
        return transactionManager;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        jdbcTemplate.setResultsMapCaseInsensitive(true);
        return jdbcTemplate;
    }
}
@Component
public class QueryBatchTasklet implements Tasklet {

    @Autowired
    private QueryBatchService autoClosureService;

    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
        autoClosureService.updateStatusAndTriggerComm();
        return null;
    }
}
@Component
public class ClosureBatchTasklet implements Tasklet {

    @Autowired
    private ClosureBatchService closureBatchService;

    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
        closureBatchService.updateStatusAndTriggerComm();
        return null;
    }
}
I started getting this error all of a sudden.

Check whether there are records in the BATCH_JOB_INSTANCE table of the database you connect to that have no matching records in the BATCH_JOB_EXECUTION table, or whether there is some other data inconsistency in the Spring Batch metadata tables. If possible, drop and recreate them.
This error came about for us as the result of a database purge job that didn't delete everything it needed to.
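If it helps, a quick way to spot that particular inconsistency is to list job instances that have no execution at all; a minimal sketch using a plain JdbcTemplate (assuming the default, unprefixed Spring Batch table names):

import java.util.List;

import org.springframework.jdbc.core.JdbcTemplate;

public class BatchMetadataCheck {

    // Lists BATCH_JOB_INSTANCE ids that have no matching BATCH_JOB_EXECUTION row,
    // which is exactly the situation that makes getNextJobParameters fail.
    public List<Long> findOrphanedJobInstances(JdbcTemplate jdbcTemplate) {
        String sql = "select ji.JOB_INSTANCE_ID from BATCH_JOB_INSTANCE ji "
                + "left join BATCH_JOB_EXECUTION je on je.JOB_INSTANCE_ID = ji.JOB_INSTANCE_ID "
                + "where je.JOB_EXECUTION_ID is null";
        return jdbcTemplate.queryForList(sql, Long.class);
    }
}

Any ids returned here point at the rows to clean up (or at the reason to drop and recreate the metadata tables).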
Please see my answer on this similar question; it may help.
I have a Spring Boot application using multiple datasources:
@RequiredArgsConstructor
@Configuration
@AutoConfigureAfter({ DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class })
public class FlywayCustomConfig {

    @Resource(name = "dataSource")
    private final DataSource gameDatasource;

    @Resource(name = "profileDataSource")
    private final DataSource profileDataSource;

    @Resource(name = "authDataSource")
    private final DataSource authDataSource;

    /**
     * Primary Flyway bean. Used for the game database. This database is being wiped clean after every game.
     *
     * @return Flyway bean
     */
    @Primary
    @Bean(name = "flyway")
    @ConfigurationProperties(prefix = "flyway.game")
    public Flyway flywayGame() {
        Flyway flyway = Flyway.configure()
                .locations("classpath:db/migration/game")
                .dataSource(gameDatasource)
                .load();
        flyway.migrate();
        return flyway;
    }

    /**
     * Profile Flyway bean. Used for the profile database. This database is persistent and should not change.
     *
     * @return Flyway bean
     */
    @Bean(name = "flywayProfile")
    @ConfigurationProperties(prefix = "flyway.profile")
    public Flyway flywayProfile() {
        Flyway flyway = Flyway.configure()
                .locations("classpath:db/migration/profile")
                .dataSource(profileDataSource)
                .load();
        flyway.migrate();
        return flyway;
    }

    /**
     * Auth Flyway bean. Used for the auth database. This database is persistent and should not change.
     *
     * @return Flyway bean
     */
    @Bean(name = "flywayAuth")
    @ConfigurationProperties(prefix = "flyway.auth")
    public Flyway flywayAuth() {
        Flyway flyway = Flyway.configure()
                .locations("classpath:db/migration/auth")
                .dataSource(authDataSource)
                .load();
        flyway.migrate();
        return flyway;
    }

    @Bean
    @Primary
    public FlywayMigrationInitializer flywayInitializerGame(@Qualifier("flyway") Flyway flywayGame) {
        return new FlywayMigrationInitializer(flywayGame, null);
    }

    @Bean
    public FlywayMigrationInitializer flywayInitializerProfile(@Qualifier("flywayProfile") Flyway flywayProfile) {
        return new FlywayMigrationInitializer(flywayProfile, null);
    }

    @Bean
    public FlywayMigrationInitializer flywayInitializerAuth(@Qualifier("flywayAuth") Flyway flywayAuth) {
        return new FlywayMigrationInitializer(flywayAuth, null);
    }
}
This is the datasource config for my auth details:
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(
        entityManagerFactoryRef = "authEntityManagerFactory",
        basePackages = { "com.withergate.api.auth.repository" }
)
public class AuthDbConfig {

    /**
     * Auth datasource. Used for the auth database.
     *
     * @return datasource
     */
    @Bean(name = "authDataSource")
    @ConfigurationProperties(prefix = "auth.datasource")
    public DataSource dataSource() {
        return DataSourceBuilder.create().build();
    }

    /**
     * Auth entity manager factory. Used for the auth database.
     *
     * @return entity manager factory
     */
    @Bean(name = "authEntityManagerFactory")
    public LocalContainerEntityManagerFactoryBean entityManagerFactory(EntityManagerFactoryBuilder builder,
            @Qualifier("authDataSource") DataSource dataSource) {
        return builder
                .dataSource(dataSource)
                .packages("com.withergate.api.auth.model")
                .persistenceUnit("auth")
                .build();
    }

    /**
     * Auth transaction manager. Used for the auth database.
     *
     * @return transaction manager
     */
    @Bean(name = "authTransactionManager")
    public PlatformTransactionManager transactionManager(
            @Qualifier("authEntityManagerFactory") EntityManagerFactory entityManagerFactory) {
        return new JpaTransactionManager(entityManagerFactory);
    }
}
Everything works fine when I log in via Thymeleaf, but as soon as I try performing an OAuth2 request, I get an exception:
2020-12-11 11:55:30.668 ERROR 412122 --- [nio-8080-exec-5] o.a.c.c.C.[.[.[/].[dispatcherServlet] : Servlet.service() for servlet [dispatcherServlet] in context with path [] threw exception [Request processing failed; nested exception is org.springframework.jdbc.BadSqlGrammarException: PreparedStatementCallback; bad SQL grammar [select client_id, client_secret, resource_ids, scope, authorized_grant_types, web_server_redirect_uri, authorities, access_token_validity, refresh_token_validity, additional_information, autoapprove from oauth_client_details where client_id = ?]; nested exception is java.sql.SQLSyntaxErrorException: Table 'game.oauth_client_details' doesn't exist] with root cause
java.sql.SQLSyntaxErrorException: Table 'game.oauth_client_details' doesn't exist
It seems that OAuth2 is trying to use the wrong datasource for fetching the user details.
My OAuth2 config looks as follows:
@Configuration
@EnableAuthorizationServer
public class OAuth2Config extends AuthorizationServerConfigurerAdapter {

    private final UserDetailsService userService;
    private final TokenStore tokenStore;
    @Qualifier("authDataSource")
    private final DataSource dataSource;
    private final AuthenticationManager authenticationManager;

    /**
     * OAuth2Config constructor.
     */
    public OAuth2Config(
            UserDetailsService userService, TokenStore tokenStore, DataSource dataSource, @Lazy AuthenticationManager authenticationManager
    ) {
        this.userService = userService;
        this.tokenStore = tokenStore;
        this.dataSource = dataSource;
        this.authenticationManager = authenticationManager;
    }

    @Bean
    public BCryptPasswordEncoder passwordEncoder() {
        return new BCryptPasswordEncoder();
    }

    @Override
    public void configure(AuthorizationServerSecurityConfigurer security) {
        security.tokenKeyAccess("permitAll()").checkTokenAccess("isAuthenticated()");
    }

    @Override
    public void configure(AuthorizationServerEndpointsConfigurer configurer) {
        configurer.authenticationManager(authenticationManager);
        configurer.userDetailsService(userService);
        configurer.tokenStore(tokenStore);
    }

    @Override
    public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
        clients.jdbc(dataSource);
    }
}
Does anyone have an idea what I am doing wrong?
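One detail worth double-checking here: the @Qualifier above sits on the dataSource field, but the value is actually injected through the constructor, and an unqualified constructor parameter resolves to the @Primary (game) datasource. A minimal sketch of moving the qualifier onto the constructor parameter instead (an assumption about the cause, not a verified fix):

public OAuth2Config(
        UserDetailsService userService,
        TokenStore tokenStore,
        @Qualifier("authDataSource") DataSource dataSource, // qualify the parameter that is actually injected
        @Lazy AuthenticationManager authenticationManager) {
    this.userService = userService;
    this.tokenStore = tokenStore;
    this.dataSource = dataSource;
    this.authenticationManager = authenticationManager;
}

With the qualifier on the parameter, clients.jdbc(dataSource) should run against the auth database rather than the game database.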
My Multithreaded Spring Batch Step is behaving almost erratically. I haven't been able to discern any kind of pattern in the ways it's failing. Sometimes it reads and writes too many records from the database and sometimes it doesn't read enough.
I'm using a RepositoryItemReader to execute a custom native query. I've defined a countQuery for it and I've used the reader's setMaxItemCount(totalLimit) method, but it seems to treat that as more of a suggestion than an actual hard maximum. Because with a thread count of 4, and just 1 intentionally bad record that causes 1 skip in the processor logic, I've seen...
limit | pageSize | chunkSize || actual writes
100 | 10 | 5 || 110 unique writes
800 | 100 | 25 || 804 unique writes, and 37 duplicate writes (WHY?)
800 | 100 | 25 || 663 unique writes, and 165 duplicate writes (WHYYYY???)
My project is using Spring Boot 2.1.11.RELEASE and it looks like the version of spring-batch-infrastructure being pulled in is 4.1.3.RELEASE. Does anyone have any idea why Spring Batch performs either too many or duplicate writes when just 1 skip occurs on one of the pages?
Maybe it has something to do with the way I've configured my in-memory JobRepository...
Here's my repository class:
@Repository
public interface MyEntityRepository extends JpaRepository<MyEntity, Integer> {

    String FROM_MY_ENTITY_TABLE_LEFT_JOINED_WITH_ANOTHER_TABLE = "from {h-schema}my_entity e " +
            "left join {h-schema}another_table a " +
            "on e.fk = a.pk ";

    @Query(
            value = "select e.id, e.name, a.additional_info " +
                    FROM_MY_ENTITY_TABLE_LEFT_JOINED_WITH_ANOTHER_TABLE +
                    "where e.status <> :status and e.add_date < :date",
            countQuery = "select count(*) " +
                    FROM_MY_ENTITY_TABLE_LEFT_JOINED_WITH_ANOTHER_TABLE +
                    "where e.status <> :status and e.add_date < :date",
            nativeQuery = true)
    Page<MyProjection> findMyProjectionsWithoutStatusBeforeDate(@Param("status") String status,
                                                                @Param("date") Date date,
                                                                Pageable page);
}
And here's how I've configured my job:
@Configuration
public class ConversionBatchJobConfig {

    @Bean
    public SimpleCompletionPolicy processChunkSize(@Value("${commit.chunk.size:5}") Integer chunkSize) {
        return new SimpleCompletionPolicy(chunkSize);
    }

    @Bean
    @StepScope
    public ItemStreamReader<MyProjection> dbReader(
            MyEntityRepository myEntityRepository,
            @Value("#{jobParameters[startTime]}") Date startTime,
            @Value("#{jobParameters[pageSize]}") Integer pageSize,
            @Value("#{jobParameters[limit]}") Integer limit) {
        RepositoryItemReader<MyProjection> myProjectionRepositoryReader = new RepositoryItemReader<>();
        myProjectionRepositoryReader.setRepository(myEntityRepository);
        myProjectionRepositoryReader.setMethodName("findMyProjectionsWithoutStatusBeforeDate");
        myProjectionRepositoryReader.setArguments(new ArrayList<Object>() {{
            add("REMOVED");
            add(startTime);
        }});
        myProjectionRepositoryReader.setSort(new HashMap<String, Sort.Direction>() {{
            put("e.id", Sort.Direction.ASC);
        }});
        myProjectionRepositoryReader.setPageSize(pageSize);
        myProjectionRepositoryReader.setMaxItemCount(limit);
        myProjectionRepositoryReader.setSaveState(false);
        return myProjectionRepositoryReader;
    }

    @Bean
    @StepScope
    public ItemProcessor<MyProjection, JsonMessage> dataConverter(AdditionalDbDataRetrievalService dataRetrievalService) {
        return new MyProjectionToJsonMessageConverter(dataRetrievalService); // <== simple ItemProcessor implementation
    }

    @Bean
    @StepScope
    public ItemWriter<JsonMessage> jsonPublisher(GcpPubsubPublisherService publisherService) {
        return new JsonMessageWriter(publisherService); // <== simple ItemWriter implementation
    }

    @Bean
    public Step conversionProcess(SimpleCompletionPolicy processChunkSize,
                                  ItemStreamReader<MyProjection> dbReader,
                                  ItemProcessor<MyProjection, JsonMessage> dataConverter,
                                  ItemWriter<JsonMessage> jsonPublisher,
                                  StepBuilderFactory stepBuilderFactory,
                                  TaskExecutor conversionThreadPool,
                                  @Value("${conversion.failure.limit:20}") int maximumFailures) {
        return stepBuilderFactory.get("conversionProcess")
                .<MyProjection, JsonMessage>chunk(processChunkSize)
                .reader(dbReader)
                .processor(dataConverter)
                .writer(jsonPublisher)
                .faultTolerant()
                .skipPolicy(new MyCustomConversionSkipPolicy(maximumFailures))
                // ^ for now this returns true for everything until 20 failures
                .listener(new MyConversionSkipListener(processStatus))
                // ^ for now this just logs the error
                .taskExecutor(conversionThreadPool)
                .build();
    }

    @Bean
    public Job conversionJob(Step conversionProcess,
                             JobBuilderFactory jobBuilderFactory) {
        return jobBuilderFactory.get("conversionJob")
                .start(conversionProcess)
                .build();
    }
}
And here's how I've configured my in-memory Job Repository:
@Configuration
@EnableBatchProcessing
public class InMemoryBatchManagementConfig {

    @Bean
    public ResourcelessTransactionManager resourcelessTransactionManager() {
        ResourcelessTransactionManager resourcelessTransactionManager = new ResourcelessTransactionManager();
        return resourcelessTransactionManager;
    }

    @Bean
    public MapJobRepositoryFactoryBean mapJobRepositoryFactory(ResourcelessTransactionManager resourcelessTransactionManager)
            throws Exception {
        MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(resourcelessTransactionManager);
        factory.afterPropertiesSet();
        return factory;
    }

    @Bean
    public JobRepository jobRepository(MapJobRepositoryFactoryBean factory) throws Exception {
        return factory.getObject();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) throws Exception {
        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository);
        launcher.afterPropertiesSet();
        return launcher;
    }

    @Bean
    public JobExplorer jobExplorer(MapJobRepositoryFactoryBean factory) {
        return new SimpleJobExplorer(factory.getJobInstanceDao(), factory.getJobExecutionDao(),
                factory.getStepExecutionDao(), factory.getExecutionContextDao());
    }

    @Bean
    public BatchConfigurer batchConfigurer(MapJobRepositoryFactoryBean mapJobRepositoryFactory,
                                           ResourcelessTransactionManager resourceslessTransactionManager,
                                           SimpleJobLauncher jobLauncher,
                                           JobExplorer jobExplorer) {
        return new BatchConfigurer() {
            @Override
            public JobRepository getJobRepository() throws Exception {
                return mapJobRepositoryFactory.getObject();
            }

            @Override
            public PlatformTransactionManager getTransactionManager() throws Exception {
                return resourceslessTransactionManager;
            }

            @Override
            public JobLauncher getJobLauncher() throws Exception {
                return jobLauncher;
            }

            @Override
            public JobExplorer getJobExplorer() throws Exception {
                return jobExplorer;
            }
        };
    }
}
EDIT
I was able to get Spring Batch working with an H2 database instead of a Map repository, but I'm still seeing the same issue. Here's how I configured Batch to use H2:
I imported the H2 driver:
<dependency>
    <groupId>com.h2database</groupId>
    <artifactId>h2</artifactId>
    <version>1.4.200</version>
</dependency>
I configured my primary DB config to point to my JPA entities:
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = "com.company.project.jpa.repository", transactionManagerRef = "transactionManager")
@EntityScan(basePackages = "com.company.project.jpa.entity")
public class DbConfig {

    @Bean
    @Primary
    @ConfigurationProperties("oracle.datasource")
    public DataSource dataSource() {
        return DataSourceBuilder.create().build();
    }

    @Bean
    @Primary
    public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource,
            EntityManagerFactoryBuilder builder) {
        return builder.dataSource(dataSource).packages("com.company.project.jpa").build();
    }

    @Bean
    @Primary
    public PlatformTransactionManager transactionManager(
            @Qualifier("entityManagerFactory") LocalContainerEntityManagerFactoryBean entityManagerFactory) {
        return new JpaTransactionManager(entityManagerFactory.getObject());
    }
}
And then I configured my in-memory Batch management like this:
@Configuration
@EnableBatchProcessing
public class InMemoryBatchManagementConfig {

    @Bean(destroyMethod = "shutdown")
    public EmbeddedDatabase h2DataSource() {
        return new EmbeddedDatabaseBuilder().setType(EmbeddedDatabaseType.H2)
                .addScript("classpath:org/springframework/batch/core/schema-drop-h2.sql")
                .addScript("classpath:org/springframework/batch/core/schema-h2.sql")
                .build();
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean h2EntityManagerFactory(EmbeddedDatabase h2DataSource,
            EntityManagerFactoryBuilder builder) {
        return builder.dataSource(h2DataSource).packages("org.springframework.batch.core").build();
    }

    @Bean
    public PlatformTransactionManager h2TransactionManager(
            @Qualifier("h2EntityManagerFactory") LocalContainerEntityManagerFactoryBean h2EntityManagerFactory) {
        return new JpaTransactionManager(h2EntityManagerFactory.getObject());
    }

    @Bean
    public JobRepository jobRepository(EmbeddedDatabase h2DataSource,
            @Qualifier("h2TransactionManager") PlatformTransactionManager h2TransactionManager) throws Exception {
        final JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDatabaseType(DatabaseType.H2.getProductName());
        factory.setDataSource(h2DataSource);
        factory.setTransactionManager(h2TransactionManager);
        return factory.getObject();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository);
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    @Bean
    public JobRepositoryFactoryBean jobRepositoryFactoryBean(EmbeddedDatabase h2DataSource,
            @Qualifier("h2TransactionManager") PlatformTransactionManager h2TransactionManager) {
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDataSource(h2DataSource);
        jobRepositoryFactoryBean.setTransactionManager(h2TransactionManager);
        return jobRepositoryFactoryBean;
    }

    @Bean
    public BatchConfigurer batchConfigurer(JobRepository jobRepository,
                                           SimpleJobLauncher jobLauncher,
                                           @Qualifier("h2TransactionManager") PlatformTransactionManager h2TransactionManager,
                                           JobExplorer jobExplorer) {
        return new BatchConfigurer() {
            @Override
            public JobRepository getJobRepository() {
                return jobRepository;
            }

            @Override
            public PlatformTransactionManager getTransactionManager() {
                return h2TransactionManager;
            }

            @Override
            public JobLauncher getJobLauncher() {
                return jobLauncher;
            }

            @Override
            public JobExplorer getJobExplorer() {
                return jobExplorer;
            }
        };
    }
}
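One way to rule out concurrent access to the paging reader in the multi-threaded step above is to serialize reads by wrapping the RepositoryItemReader in a SynchronizedItemStreamReader; a minimal sketch (an assumption that shared access to the reader contributes to the duplicates, not a confirmed diagnosis):

import org.springframework.batch.item.ItemStreamReader;
import org.springframework.batch.item.support.SynchronizedItemStreamReader;

public class ReaderWrapping {

    // Serializes calls to read() so only one worker thread pages through the
    // repository at a time; processing and writing stay multi-threaded.
    public static ItemStreamReader<MyProjection> synchronize(ItemStreamReader<MyProjection> dbReader) {
        SynchronizedItemStreamReader<MyProjection> reader = new SynchronizedItemStreamReader<>();
        reader.setDelegate(dbReader);
        return reader;
    }
}

The wrapped reader would then be passed to .reader(...) in the step definition in place of the raw RepositoryItemReader.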
I'm working with Spring Data. I created a config class with @Bean, @Entity, and a Main.java, but when I run the project I get an exception:
Exception in thread "main" java.lang.NullPointerException
The @Autowired annotation doesn't work!
Main.java
public class Main {

    @Autowired
    private static TodoRepository todoRepository;

    public static void main(String[] args) {
        Todo todo = new Todo();
        todo.setId(1l);
        todo.setTitle("title");
        System.out.println(todoRepository); // null
        todoRepository.save(todo); // Exception in thread "main" java.lang.NullPointerException
    }
}
Context class
@Configuration
@EnableJpaRepositories(basePackages = {"repository"},
        entityManagerFactoryRef = "entityManagerFactory",
        transactionManagerRef = "transactionManager")
@EnableTransactionManagement
@PropertySource("classpath:app.properties")
public class PersistenceContext {

    public PersistenceContext() {
    }

    @Resource
    private Environment env;

    /**
     * The method that configures the datasource bean
     */
    @Bean(destroyMethod = "close")
    DataSource dataSource() {
        HikariConfig dataSourceConfig = new HikariConfig();
        dataSourceConfig.setJdbcUrl(env.getRequiredProperty("db.url"));
        dataSourceConfig.setDriverClassName(env.getRequiredProperty("db.driver"));
        dataSourceConfig.setUsername(env.getRequiredProperty("db.username"));
        dataSourceConfig.setPassword(env.getRequiredProperty("db.password"));
        return new HikariDataSource(dataSourceConfig);
    }

    /**
     * The method that configures the entity manager factory
     */
    @Bean
    LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource, Environment env) {
        LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        entityManagerFactoryBean.setDataSource(dataSource);
        entityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
        entityManagerFactoryBean.setPackagesToScan("entity");
        Properties jpaProperties = new Properties();
        jpaProperties.put("hibernate.dialect", env.getRequiredProperty("hibernate.dialect"));
        jpaProperties.put("hibernate.hbm2ddl.auto", env.getRequiredProperty("hibernate.hbm2ddl.auto"));
        jpaProperties.put("hibernate.show_sql", env.getRequiredProperty("hibernate.show_sql"));
        jpaProperties.put("hibernate.format_sql", env.getRequiredProperty("hibernate.format_sql"));
        entityManagerFactoryBean.setJpaProperties(jpaProperties);
        return entityManagerFactoryBean;
    }

    /**
     * The method that configures the transaction manager
     */
    @Bean
    JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(entityManagerFactory);
        return transactionManager;
    }
}
Repositories
public interface TodoRepository extends CrudRepository<Todo, Long> {
}
Stacktrace
null
Exception in thread "main" java.lang.NullPointerException
at Main.main(Main.java:28)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
Process finished with exit code 1
Your main class is not a managed Spring bean, so field injection never happens there. You need to create an ApplicationContext yourself and fetch the repository from it; see below:
public class Main {

    public static void main(String[] args) {
        ApplicationContext ctx = new AnnotationConfigApplicationContext(PersistenceContext.class);
        TodoRepository todoRepository = ctx.getBean(TodoRepository.class);
        Todo todo = new Todo();
        todo.setId(1l);
        todo.setTitle("title");
        System.out.println(todoRepository); // not null
        todoRepository.save(todo);
    }
}
I'm using spring-jpa. I have 2 tests.
@Test
@Transactional
public void testFindAll() {
    List<Engine> eList = engineService.findAll();
    Engine e = eList.get(0); // this engine id=3
    List<Translation> tList = e.getTranslations();
    for (Translation t : tList) {
        ...
    }
}
This method fails with this exception:
org.hibernate.LazyInitializationException: failed to lazily initialize
a collection of role: xxx.Engine.translations, could not initialize
proxy - no Session
However, this method works just fine:
@Test
@Transactional
public void testFindOne() {
    Engine e = engineService.findOne(3);
    List<Translation> tList = e.getTranslations();
    for (Translation t : tList) {
        ...
    }
}
Why is the translation list successfully loaded in one case but not in the other?
EDIT: service/repo code:
public interface EngineRepository extends JpaRepository<Engine, Integer>
{
}
.
@Service
@Transactional
public class EngineService
{
    @Autowired
    private EngineRepository engineRepository;

    public List<Engine> findAll()
    {
        return engineRepository.findAll();
    }

    public Engine findOne(Integer engId)
    {
        return engineRepository.findOne(engId);
    }
}
.
public class Engine implements Serializable {
    ...
    @OneToMany
    @JoinColumn(name="ID", referencedColumnName="TRAN_ID", insertable=false, updatable=false, nullable=true)
    @LazyCollection(LazyCollectionOption.EXTRA)
    private List<Translation> translations;
    ...
}
Config:
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = {"xxx.dao"})
@ComponentScan(basePackages = {"xxx.dao", "xxx.service", "xxx.bean"})
@PropertySource("classpath:application.properties")
public class SpringDataConfig {

    @Autowired
    private Environment environment;

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setUrl(environment.getProperty("db.url"));
        dataSource.setDriverClassName(environment.getProperty("db.driverClass"));
        dataSource.setUsername(environment.getProperty("db.username"));
        dataSource.setPassword(environment.getProperty("db.password"));
        return dataSource;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() throws NamingException {
        HibernateJpaVendorAdapter hibernateJpaVendorAdapter = new HibernateJpaVendorAdapter();
        hibernateJpaVendorAdapter.setDatabase(Database.POSTGRESQL);
        Properties properties = new Properties();
        properties.put("hibernate.dialect", environment.getProperty("hibernate.dialect"));
        properties.put("hibernate.show_sql", environment.getProperty("hibernate.showSQL"));
        properties.put("hibernate.format_sql", environment.getProperty("hibernate.formatSQL"));
        LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        entityManagerFactoryBean.setDataSource(dataSource());
        entityManagerFactoryBean.setPackagesToScan("xxx.model");
        entityManagerFactoryBean.setJpaVendorAdapter(hibernateJpaVendorAdapter);
        entityManagerFactoryBean.setJpaProperties(properties);
        return entityManagerFactoryBean;
    }

    @Bean
    public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(entityManagerFactory);
        return transactionManager;
    }
}
I think the problem here is that the session gets closed after the first line in the first case. You should check out the JpaRepository implementation of findAll().
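If the goal is to have the translations available after the transaction has ended, one option is to initialize the collection inside the query itself with a fetch join; a minimal sketch against the repository shown above (the method name is illustrative, and this assumes the mapping allows a fetch join):

import java.util.List;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;

public interface EngineRepository extends JpaRepository<Engine, Integer>
{
    // Loads each Engine together with its translations in a single query,
    // so the collection is already initialized when the session closes.
    @Query("select distinct e from Engine e left join fetch e.translations")
    List<Engine> findAllWithTranslations();
}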
Integration Testing with Spring
It seems you're failing to provide a Spring context within your TestCase. What does that mean? The @Transactional annotation is being ignored, so you end up with the closed-session exception because there is no transaction.
Take a look at how to configure a TestCase with a Spring context here:
@RunWith(SpringJUnit4ClassRunner.class)
// ApplicationContext will be loaded from AppConfig and TestConfig
@ContextConfiguration(classes = {AppConfig.class, TestConfig.class})
public class MyTest {

    @Autowired
    EngineService engineService;

    @Test
    @Transactional
    public void testFindOne() {}

    @Test
    @Transactional
    public void testFindAll() {}
}
I created a Spring Batch application with Java configuration. I have a main method and a class that represents a job.
@ComponentScan
@EnableAutoConfiguration
public class App
{
    public static void main( String[] args )
    {
        System.out.println( "Starting Spring Batch Execution -------" );
        SpringApplication.run(App.class, args);
    }
}
@Configuration
@EnableBatchProcessing
public class FlatFileJob {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    /**
     * Create and configure job
     * @return
     */
    @Bean(name = "Read RabbitMQ")
    public Job addFlatFileJob() {
        return jobs.get("carJob")
                .start(this.flatFileStep())
                .build();
    }

    /**
     * Create and configure the only step
     * @return
     */
    @Bean
    public Step flatFileStep() {
        return steps.get("step")
                .<Car, Car> chunk(3)
                .reader(new CarItemReader())
                .processor(new CarItemProcessor())
                .writer(new CarItemWriter())
                .build();
    }

    @Bean
    public PlatformTransactionManager transactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public JobRepository jobRepository() throws Exception {
        JobRepository jobRepository = (JobRepository) new JobRepositoryFactoryBean();
        return jobRepository;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

    @Bean
    public DataSource getDataSource() {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName("org.postgresql.Driver");
        dataSource.setUrl("jdbc:postgresql://127.0.0.1:5432/spring_batch");
        dataSource.setUsername("xxx");
        dataSource.setPassword("xxx");
        return dataSource;
    }
}
When I execute this, I get the following exception:
Caused by: java.lang.IllegalArgumentException: DataSource must not be null.
at org.springframework.util.Assert.notNull(Assert.java:112)
at org.springframework.batch.core.repository.support.JobRepositoryFactoryBean.afterPropertiesSet(JobRepositoryFactoryBean.java:171)
at org.springframework.batch.core.repository.support.AbstractJobRepositoryFactoryBean.getObject(AbstractJobRepositoryFactoryBean.java:202)
at neoway.com.job.FlatFileJob.jobRepository(FlatFileJob.java:88)
at neoway.com.job.FlatFileJob$$EnhancerBySpringCGLIB$$7ec3c4f6.CGLIB$jobRepository$0(<generated>)
at neoway.com.job.FlatFileJob$$EnhancerBySpringCGLIB$$7ec3c4f6$$FastClassBySpringCGLIB$$990caa45.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invokeSuper(MethodProxy.java:228)
at org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor.intercept(ConfigurationClassEnhancer.java:312)
at neoway.com.job.FlatFileJob$$EnhancerBySpringCGLIB$$7ec3c4f6.jobRepository(<generated>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:166)
I configured everything with Java, including the DataSource. I don't know why Spring doesn't recognize the DataSource configuration. What's the problem?
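For comparison, a JobRepositoryFactoryBean only produces a JobRepository after it has been given a DataSource and a transaction manager; a minimal sketch of that wiring (one possible variant, since @EnableBatchProcessing can also build the JobRepository automatically once a DataSource bean is present):

@Bean
public JobRepository jobRepository(DataSource dataSource,
        PlatformTransactionManager transactionManager) throws Exception {
    // The factory is not itself a JobRepository; without a DataSource it fails
    // in afterPropertiesSet() with "DataSource must not be null".
    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
    factory.setDataSource(dataSource);
    factory.setTransactionManager(transactionManager);
    factory.afterPropertiesSet();
    return factory.getObject();
}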