EDIT: Although I think Hibernate should be able to do this automatically, I have a DDL script containing the structure I need. If I can run this script so that the tables are created automatically, I'm happy for now... (see this question)
My apologies for the lengthy post. My question might be quite simple, but being a Hibernate and Spring noob, this has been bugging me all morning...
I'm using Spring Batch. When running an import job, Spring wants to store metadata about the job execution in tables such as BATCH_JOB_INSTANCE. However, these tables are not created by Hibernate.
I run the job by:
public static void main(String[] args) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    ApplicationContext context = new ClassPathXmlApplicationContext("bean_configuration.xml");
    SimpleJobLauncher launcher = (SimpleJobLauncher) context.getBean("jobLauncher");
    FlowJob job = (FlowJob) context.getBean("importIVSJob");
    launcher.run(job, new JobParameters());
}
My hibernate properties file:
#hibernate.c3p0.min_size=5
#hibernate.c3p0.max_size=20
#hibernate.c3p0.timeout=1800
#hibernate.c3p0.max_statements=50
hibernate.dialect=org.hibernate.dialect.HSQLDialect
hibernate.show_sql=true
hibernate.hbm2ddl.auto=create
And the XML config:
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/jdbc
http://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-2.5.xsd
http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch-2.1.xsd">
<batch:job job-repository="jobRepository" id="importIVSJob">
<batch:step id="step1">
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="csvFileReader" writer="itemWriter"
commit-interval="5" />
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="csvFileReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="resource" ref="inputResource" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer"
p:delimiter=";"
p:names="objectcode, event, beginTijd, duurEvent, kolkNummer, deelkolkNummer, vaarrichting, eniNummer, aantalSchepen, scheepType, vaartType, subTypeVaart, rwsKlasseHoofgroep, rwsKlasseSubgroep, cemtKlasse, laadvermogen, dwtLaadvermogen, scheepslengte, scheepsbreedte, scheepsdiepgang, scheepshoogte, vlagCode, beladingsCode, cargoGewicht, seinVoeringKegel, vrachtAanBoord, aantalContainers, aantalTEU_containers, reisId, toerbeurt, invaartGroen, uitvaartGroen" />
</property>
<property name="fieldSetMapper">
<bean
class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper"
p:targetType="nl.triopsys.styx.importeer.IVS.IvsBericht" />
</property>
</bean>
</property>
</bean>
<bean id="itemWriter" class="org.springframework.batch.item.file.FlatFileItemWriter">
<property name="resource" ref="outputResource" />
<property name="lineAggregator">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," />
<property name="fieldExtractor">
<bean
class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names"
value="objectcode, event, beginTijd, duurEvent, kolkNummer, deelkolkNummer, vaarrichting, eniNummer, aantalSchepen, scheepType, vaartType, subTypeVaart, rwsKlasseHoofgroep, rwsKlasseSubgroep, cemtKlasse, laadvermogen, dwtLaadvermogen, scheepslengte, scheepsbreedte, scheepsdiepgang, scheepshoogte, vlagCode, beladingsCode, cargoGewicht, seinVoeringKegel, vrachtAanBoord, aantalContainers, aantalTEU_containers, reisId, toerbeurt, invaartGroen, uitvaartGroen" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="inputResource" class="org.springframework.core.io.FileSystemResource">
<constructor-arg index="0" value="file:C:\Users\jgoddijn\testdata\IVS90.csv" />
</bean>
<bean id="outputResource" class="org.springframework.core.io.FileSystemResource">
<constructor-arg index="0" value="C:\Users\jgoddijn\testdata\output.csv" />
</bean>
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"
p:location="batch.properties" p:ignoreUnresolvablePlaceholders="true" />
<bean id="dataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="org.hsqldb.jdbcDriver" />
<property name="url"
value="jdbc:hsqldb:file:/D:/DMVV/HSQLDB;shutdown=true;hsqldb.write_delay=false" />
<property name="username" value="sa" />
<property name="password" value="" />
</bean>
<bean id="transactionManager"
class="org.springframework.orm.hibernate4.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
<bean id="sessionFactory"
class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="annotatedClasses">
<list>
<value>nl.triopsys.styx.importeer.IVS.IvsBericht</value>
<value>nl.triopsys.styx.domain.Vaartuig</value>
<value>nl.triopsys.styx.domain.AIS.MMSI</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.Scheepsnummer</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.EniNummer</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.ImoNummer</value>
</list>
</property>
<property name="hibernateProperties">
<bean
class="org.springframework.beans.factory.config.PropertiesFactoryBean">
<property name="location" value="batch.properties" />
</bean>
</property>
</bean>
<bean id="jobRegistry"
class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher"
p:jobRepository-ref="jobRepository" />
<bean id="jobRegistryBeanPostProcessor"
class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor"
p:jobRegistry-ref="jobRegistry" />
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"
p:dataSource-ref="dataSource" p:transactionManager-ref="transactionManager" />
</beans>
And the error I get:
Exception in thread "main" org.hibernate.exception.SQLGrammarException: user lacks privilege or object not found: BATCH_JOB_INSTANCE
at org.hibernate.exception.internal.SQLExceptionTypeDelegate.convert(SQLExceptionTypeDelegate.java:83)
at org.hibernate.exception.internal.StandardSQLExceptionConverter.convert(StandardSQLExceptionConverter.java:49)
at org.hibernate.engine.jdbc.spi.SqlExceptionHelper.convert(SqlExceptionHelper.java:125)
at org.hibernate.engine.jdbc.spi.SqlExceptionHelper.convert(SqlExceptionHelper.java:110)
at org.hibernate.engine.jdbc.internal.proxy.ConnectionProxyHandler.continueInvocation(ConnectionProxyHandler.java:146)
at org.hibernate.engine.jdbc.internal.proxy.AbstractProxyHandler.invoke(AbstractProxyHandler.java:81)
at $Proxy20.prepareStatement(Unknown Source)
at org.springframework.jdbc.core.JdbcTemplate$SimplePreparedStatementCreator.createPreparedStatement(JdbcTemplate.java:1436)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:581)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:637)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:666)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:674)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:714)
at org.springframework.jdbc.core.simple.SimpleJdbcTemplate.query(SimpleJdbcTemplate.java:204)
at org.springframework.jdbc.core.simple.SimpleJdbcTemplate.query(SimpleJdbcTemplate.java:209)
at org.springframework.batch.core.repository.dao.JdbcJobInstanceDao.getJobInstance(JdbcJobInstanceDao.java:221)
at org.springframework.batch.core.repository.support.SimpleJobRepository.getLastJobExecution(SimpleJobRepository.java:253)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:318)
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:183)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:150)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:106)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:172)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:202)
at $Proxy19.getLastJobExecution(Unknown Source)
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:94)
at nl.triopsys.styx.sandbox.importeer.RunImport.main(RunImport.java:39)
It seems that no tables are created for the job execution metadata. How do I get Hibernate to create them automatically?
Thank you!
The quickest way is to specify the script via the INIT parameter on the H2 DB URL:
jdbc:h2:~/mydb;init=runscript from 'classpath:org/springframework/batch/core/schema-h2.sql'
Provided the spring-batch-core jar is on your classpath, this will create the tables on the first run of your test. You can then remove the parameter once the tables are created.
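Since the question above actually uses HSQLDB rather than H2, a database-agnostic alternative is to run the schema-hsqldb.sql script that ships inside spring-batch-core with Spring's DataSourceInitializer at startup. The following is only a minimal Java-config sketch of that idea, not part of the original answer; the class and bean names are made up:
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.init.DataSourceInitializer;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;

@Configuration
public class BatchSchemaConfig {

    // Runs the Spring Batch DDL bundled in spring-batch-core against the
    // existing dataSource when the application context starts.
    @Bean
    public DataSourceInitializer batchSchemaInitializer(DataSource dataSource) {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.addScript(new ClassPathResource(
                "org/springframework/batch/core/schema-hsqldb.sql"));
        // Tolerate "object already exists" errors on later runs.
        populator.setContinueOnError(true);

        DataSourceInitializer initializer = new DataSourceInitializer();
        initializer.setDataSource(dataSource);
        initializer.setDatabasePopulator(populator);
        return initializer;
    }
}
Declared as a Spring bean, the initializer runs the script during context startup (it implements InitializingBean), and setContinueOnError(true) keeps later runs from failing once the tables already exist.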
Related
I have the following Spring Batch job with a partitioned step that creates 3600 partitions. I use a ThreadPoolTaskExecutor with a max pool size of 100 and a queue capacity of 100 (although it seems to make no difference for speed). I'm using VisualVM to monitor the threads and I notice the taskExecutor threads don't start until more than 5 minutes after starting the job.
Oddly enough, if I limit the number of partitions to 100, the threads start fairly quickly and finish in about a minute.
Another issue I notice is that there never seems to be more than one database connection, as seen in the VisualVM thread visualization.
Can someone please review my batch job below and tell me if I am missing something that would limit the number of database connections to 1? Also, why would adding more partitions affect the performance if my ThreadPoolTaskExecutor parameters don't change? Shouldn't the jobs just sit in a queue until there is a thread able to service them?
--- Spring batch job ---
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:util="http://www.springframework.org/schema/util"
xmlns:lang="http://www.springframework.org/schema/lang"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.0.xsd">
<context:property-placeholder location="${ext.properties.dataManipulation.Properties}"/>
<import resource="${ext.properties.dataManipulation.Connection}"/>
<import resource="flatFileLineProperties.xml"/>
<!-- JobRepository and JobLauncher are configuration/setup classes -->
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean"/>
<bean id="transactionManager"
class="org.springframework.batch.support.transaction.ResourcelessTransactionManager"/>
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository"/>
</bean>
<util:map id="keys" map-class="java.util.HashMap">
<entry key="SAILING_ID" value="ASCENDING" value-type="org.springframework.batch.item.database.Order"/>
<entry key="RES_ID" value="ASCENDING" value-type="org.springframework.batch.item.database.Order" />
</util:map>
<!-- Here is my partitioned step -->
<bean id="reservationsItemReader" class="org.springframework.batch.item.database.JdbcPagingItemReader" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="queryProvider">
<bean class="org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="selectClause">
<value>
<![CDATA[
GUEST_ID,
FIRST_NAME,
LAST_NAME,
TITLE,
HOUSEHOLD_NAME,
SAILING_ID,
RES_ID
]]>
</value>
</property>
<property name="fromClause" value="FROM RESERVATION "/>
<property name="whereClause" >
<value>
<![CDATA[ AND SAIL_ID = :sailId
]]>
</value>
</property>
<!--<property name="sortKey" value="SAILING_ID" />-->
<property name="sortKeys" ref="keys"/>
<!--<property name="sortKeys" ref="sortKeys"/>-->
</bean>
</property>
<property name="parameterValues">
<map>
<!--<entry key="shipCode" value="#{stepExecutionContext[shipCode]}" />-->
<entry key="sailId" value="#{stepExecutionContext[sailId]}" />
</map>
</property>
<!--property name="pageSize" value="500000" /-->
<property name="pageSize" value="40000" />
<property name="rowMapper">
<bean class="com.ncl.endeca.mapper.ColumnToHashMapper" />
</property>
</bean>
<bean id="sortKeys" class="java.util.HashMap" scope="prototype" >
<constructor-arg>
<map key-type="java.lang.String" value-type="org.springframework.batch.item.database.Order">
<entry key="SAILING_ID" value="ASCENDING" />
<entry key="RES_ID" value="ASCENDING" />
</map>
</constructor-arg>
</bean>
<util:list id="client_fields" value-type="java.lang.String">
<value>FIRST_NAME</value>
<value>LAST_NAME</value>
<value>TITLE</value>
<value>HOUSEHOLD_NAME</value>
</util:list>
<bean id="reservationsItemWriter" class="com.ncl.endeca.writer.ReservationWriter" scope="step">
<property name="guestFields" ref="client_fields" />
<property name="outPrefix" value="${file.out.prefix}" />
<property name="shipCode" value="#{stepExecutionContext[shipCode]}" />
<property name="sailId" value="#{stepExecutionContext[sailId]}" />
<property name="soldOutSailings" ref="soldOutSailingsList" />
</bean>
<bean id="yearsAgo" class="java.lang.Integer">
<constructor-arg>
<value>${yearsAgo}</value>
</constructor-arg>
</bean>
<bean id="yearsAhead" class="java.lang.Integer">
<constructor-arg>
<value>${yearsAhead}</value>
</constructor-arg>
</bean>
<bean id="resPartitioner" class="com.ncl.endeca.partition.ReservationPartitioner">
<property name="yearsAgo" ref="yearsAgo" />
<property name="yearsAhead" ref="yearsAhead" />
<property name="batchLimit" value="${batch.limit}" />
<property name="dataSource" ref="dataSource"/>
</bean>
<bean id="taskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="${batch.corePoolSize}" />
<property name="maxPoolSize" value="${batch.maxPoolSize}" />
<property name="queueCapacity" value="${batch.queueCapacity}" />
</bean>
<!--<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor"/>-->
<!-- each thread will run this job, with different stepExecutionContext values. -->
<step id="slave" xmlns="http://www.springframework.org/schema/batch" >
<flow parent="readReservations"/>
</step>
<!--<bean id="countrySpecificCompletionPolicy" class="org.springframework.batch.core.resource.StepExecutionSimpleCompletionPolicy">-->
<!--<property name="keyName" value="sailId"/>-->
<!--</bean>-->
<batch:flow id="readReservations">
<batch:step id="reservations" xmlns="http://www.springframework.org/schema/batch" >
<tasklet throttle-limit="${batch.corePoolSize}">
<chunk reader="reservationsItemReader" writer="reservationsItemWriter" commit-interval="50000" />
</tasklet>
</batch:step>
</batch:flow>
<!-- Actual Job -->
<batch:job id="dataManipulationJob">
<batch:step id="masterStep">
<batch:partition step="slave" partitioner="resPartitioner">
<batch:handler grid-size="100" task-executor="taskExecutor" />
</batch:partition>
</batch:step>
</batch:job>
I have tried BasicDataSource and Hikari connections, but the pool sizes have no effect when I monitor VisualVM.
---- connection.xml ----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:util="http://www.springframework.org/schema/util"
xmlns:lang="http://www.springframework.org/schema/lang"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.0.xsd">
<bean id="hikariConfig" class="com.zaxxer.hikari.HikariConfig">
<property name="poolName" value="springHikariCP" />
<property name="connectionTestQuery" value="SELECT 10 from dual" />
<property name="dataSourceClassName" value="${hibernate.dataSourceClassName}" />
<property name="maximumPoolSize" value="${batch.maxPoolSize}" />
<property name="idleTimeout" value="${hibernate.hikari.idleTimeout}" />
<property name="dataSourceProperties">
<props>
<prop key="url">${dataSource.url}</prop>
<prop key="user">${dataSource.username}</prop>
<prop key="password">${dataSource.password}</prop>
</props>
</property>
</bean>
<!-- HikariCP configuration -->
<!--<bean id="dataSource" class="com.zaxxer.hikari.HikariDataSource" destroy-method="close">-->
<!--<constructor-arg ref="hikariConfig" />-->
<!--</bean>-->
<!--<bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close" scope="step">-->
<!--<property name="driverClassName" value="${hibernate.dataSourceClassName}" />-->
<!--<property name="url" value="${dataSource.url}" />-->
<!--<property name="username" value="${dataSource.username}" />-->
<!--<property name="password" value="${dataSource.password}" />-->
<!--<property name="testWhileIdle" value="false"/>-->
<!--<property name="maxActive" value="${batch.corePoolSize}"/>-->
<!--</bean>-->
<!-- connect to database -->
<bean id="dataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource" >
<property name="driverClassName" value="oracle.jdbc.OracleDriver" />
<property name="url" value="jdbc:oracle:thin:#******" />
<property name="username" value="****" />
<property name="password" value="****" />
</bean>
The time to create partitions depends on the performance of your ReservationPartitioner as well as the number of partitions. Creating 3600 partitions means creating 3600 StepExecution/ExecutionContext objects and persisting them in the corresponding tables. This can take time for such a high number of partitions.
Regarding the database connection, you are using the MapJobRepositoryFactoryBean with a ResourcelessTransactionManager, so there are no interactions with a database for Spring Batch meta-data. The only component that interacts with the database, according to your configuration, is the JdbcPagingItemReader (I don't know the type of your ReservationWriter), so it is not surprising to see a single database connection.
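For illustration, a partitioner along the lines of the ReservationPartitioner returns one ExecutionContext per partition, and the framework creates a StepExecution for each entry in that map before any worker thread picks up work. Note that grid-size is only a hint passed into partition(); the size of the returned map is what actually determines the number of partitions. The real class, field and key names are unknown, so everything below is a hypothetical sketch:
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

// Hypothetical stand-in for the ReservationPartitioner; class, field and key
// names are assumptions for illustration only.
public class SailingPartitioner implements Partitioner {

    private final List<Long> sailIds; // e.g. 3600 sailings -> 3600 partitions

    public SailingPartitioner(List<Long> sailIds) {
        this.sailIds = sailIds;
    }

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        // gridSize is only a hint; the size of the returned map decides how many
        // StepExecutions the framework has to create before any thread runs.
        Map<String, ExecutionContext> partitions = new HashMap<String, ExecutionContext>();
        for (Long sailId : sailIds) {
            ExecutionContext context = new ExecutionContext();
            context.putLong("sailId", sailId); // read via #{stepExecutionContext[sailId]}
            partitions.put("partition-" + sailId, context);
        }
        return partitions;
    }
}
With 3600 entries in that map, 3600 StepExecution/ExecutionContext pairs have to be built (and persisted when a JDBC-backed job repository is used) before the ThreadPoolTaskExecutor threads show any activity, which would explain the delay observed in VisualVM.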
I get this error when running a Spring project with -cp:
java -cp "parser.jar" hu.daniel.hari.learn.spring.orm.main.SpringOrmMain
Exception in thread "main" org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Unable to locate Spring NamespaceHandler for XML schema namespace [http://www.springframework.org/schema/p]
Offending resource: class path resource [spring.xml]
at org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)
at org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:85)
at org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:80)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:309)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.decorateIfRequired(BeanDefinitionParserDelegate.java:1464)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.decorateBeanDefinitionIfRequired(BeanDefinitionParserDelegate.java:1440)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.decorateBeanDefinitionIfRequired(BeanDefinitionParserDelegate.java:1428)
My beans.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:context="http://www.springframework.org/schema/context"
    xmlns:tx="http://www.springframework.org/schema/tx"
    xsi:schemaLocation="
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
        http://www.springframework.org/schema/context
        http://www.springframework.org/schema/context/spring-context-3.0.xsd
        http://www.springframework.org/schema/tx
        http://www.springframework.org/schema/tx/spring-tx.xsd
        ">
<!-- Scans the classpath for annotated components that will be auto-registered
as Spring beans -->
<context:component-scan base-package="hu.daniel.hari.learn.spring" />
<!-- Activates various annotations to be detected in bean classes, e.g. @Autowired -->
<context:annotation-config />
<!-- <bean id="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="org.hsqldb.jdbcDriver" /> <property
name="url" value="jdbc:hsqldb:mem://productDb" /> <property name="username"
value="sa" /> <property name="password" value="" /> </bean> -->
<bean id="dataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="org.postgresql.Driver" />
<property name="url" value="jdbc:postgresql://localhost:5432/newparser" />
<property name="username" value="postgres" />
<property name="password" value="postgres" />
<!--<property name="socketTimeout" value="10"/> -->
<property name="connectionProperties">
<props>
<prop key="socketTimeout">10</prop>
</props>
</property>
</bean>
<bean id="entityManagerFactory"
class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean"
p:packagesToScan="hu.daniel.hari.learn.spring.orm.model"
p:dataSource-ref="dataSource">
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="generateDdl" value="true" />
<property name="showSql" value="true" />
</bean>
</property>
</bean>
<!-- Transactions -->
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
<property name="entityManagerFactory" ref="entityManagerFactory" />
</bean>
<tx:annotation-driven transaction-manager="transactionManager" />
Add the namespace for the shorthand property syntax (p:):
<beans
...
xmlns:p="http://www.springframework.org/schema/p"
I am getting the error below while executing a batch job. It is not immediate, but happens after about half an hour.
2016-02-17 15:24:25,106 ERROR [AbstractStep ][poolTaskExecutor-2 ] Encountered an error executing step processSubscriptionFile-stepTwo in job subscriptionJob
org.springframework.jdbc.UncategorizedSQLException: Attempt to process next row failed; uncategorized SQLException for SQL [
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = 77317301 order by PERSON_ID
]; SQL state [99999]; error code [17010]; Closed Resultset: next; nested exception is java.sql.SQLException: Closed Resultset: next
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:84)
at
My batch job file is below. In the first step I read the data from a file and add it to an intermediate table; this step uses partitions and multiple threads. In the second step I read the distinct customer ids from this intermediate table and pass them to a writer; this step does not use partitions or threads. It fails halfway through. There are 70,000 records in the intermediate table and 10,000 distinct customer ids. Please help.
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"
default-autowire="byName">
<bean id="SubscriptionJob-jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="uk.co.batch.datamigration.JobLaunchDetails"/>
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="SubscriptionJob" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
<!-- Job -->
<batch:job id="SubscriptionJob" restartable="false">
<batch:step id="copySubscriptionFile" next="processSubscriptionFile">
<tasklet ref="copySubscriptionFileLocally" />
</batch:step>
<batch:step id="processSubscriptionFile" next="processSubscriptionFile-stepTwo">
<partition step="processSubscriptionFile-stepOne" partitioner="SubscriptionPartitioner">
<handler grid-size="20" task-executor="SubscriptionTaskExecutor"/>
</partition>
</batch:step>
<batch:step id="processSubscriptionFile-stepTwo">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepTwoReader" writer="SubscriptionItemStepTwoWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="SubscriptionJobListener"/>
<batch:listener ref="SubscriptionJobNotifier"/>
</batch:listeners>
</batch:job>
<batch:step id="processSubscriptionFile-stepOne">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepOneReader" writer="SubscriptionItemStepOneWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<bean id ="SubscriptionJobNotifier" class="uk.co.and.batch.status.JobExecutionNotifier">
<property name="snsEventType" value="SUBSCRIPTION_JOB_NOTIFICATION"/>
<property name="onlyNotifyFailures" value="true"/>
</bean>
<bean id="SubscriptionPartitioner" class="uk.co.batch.datamigration.FlatFilePartitioner" scope="step">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
</bean>
<bean id="SubscriptionItemStepOneReader" scope="step" autowire-candidate="false" parent="SubscriptionItemStepOneReaderParent">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="startAt" value="#{stepExecutionContext['startAt']}"/>
<property name="itemsCount" value="#{stepExecutionContext['itemsCount']}"/>
</bean>
<bean id="SubscriptionItemStepOneReaderParent" abstract="true"
class="uk.co.batch.datamigration.MultiThreadedFlatFileItemReader">
<property name="linesToSkip" value="1"/>
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="^"/>
<property name="names">
<list >
<value>city_name</value>
<value>country_name</value>
<value>publication_name</value>
<value>subscription_ref_code</value>
<value>subscription_state</value>
<value>unsubscribed</value>
<value>city_id</value>
<value>person_id</value>
<value>subscription_created_at</value>
<value>subscription_updated_at</value>
<value>end_value</value>
</list>
</property>
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="SubscriptionVO"/>
</bean>
</property>
</bean>
</property>
<property name="recordSeparatorPolicy">
<bean class="org.springframework.batch.item.file.separator.DefaultRecordSeparatorPolicy"/>
</property>
</bean>
<bean id="SubscriptionVO" class="uk.co.batch.datamigration.subscription.SubscriptionVO" scope="prototype"/>
<!-- TODO:Reduce number of columns to improve performance -->
<bean id="SubscriptionItemStepOneWriter"
class="org.springframework.batch.item.database.JdbcBatchItemWriter" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql">
<value>
<![CDATA[
insert into CUST_SUB_INTER(ID,CITY_NAME,COUNTRY_NAME,PUBLICATION_NAME,SUBSCRIPTION_REF_CODE,SUBSCRIPTION_STATE,
UNSUBSCRIBED,CITY_ID,PERSON_ID,SUBSCRIPTION_CREATED_AT,SUBSCRIPTION_UPDATED_AT,END_VALUE,JOB_ID,
MIGRATION_FILE_NAME,RECORD_INSERT_DATE)
values (SEQ_CUST_SUB_ERROR.NEXTVAL, :city_name, :country_name, :publication_name,
:subscription_ref_code, :subscription_state, :unsubscribed,
:city_id, :person_id, :subscription_created_at, :subscription_updated_at, :end_value,
#{jobExecutionContext[jobId]}, '#{jobExecutionContext[tempSubscriptionFile]}', SYSTIMESTAMP)
]]>
</value>
</property>
<property name="itemSqlParameterSourceProvider">
<bean class="org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider" />
</property>
</bean>
<!-- scope step is critical here-->
<bean id="SubscriptionItemStepTwoReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource"/>
<property name="sql">
<value>
<![CDATA[
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = #{jobExecutionContext.jobId} order by PERSON_ID
]]>
</value>
</property>
<property name="rowMapper">
<bean class="org.springframework.jdbc.core.SingleColumnRowMapper" p:requiredType="java.lang.Long"/>
</property>
</bean>
<bean id="SubscriptionItemStepTwoWriter" class="uk.co.batch.datamigration.subscription.SubscriptionItemStepTwoWriter" scope="step">
<property name="fileName" value="#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="jobId" value="#{jobExecutionContext.jobId}" />
<property name="errorSaving" value="${Job.errorSaving}"/>
</bean>
<bean id="SubscriptionTaskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="10"/>
</bean>
<bean id="SubscriptionJobListener" class="uk.co.batch.datamigration.JobListener">
<property name="sessionFactory" ref="sftpSessionFactory"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteExtension" value="${Job.sftp.remoteExtension}"/>
<property name="renameRemoteFile" value="true"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="batchAdministratorUserId" value="${Job.batchAdministratorUserId}"/>
</bean>
<bean id="copySubscriptionFileLocally" class="uk.co.and.batch.core.LocalCopyTasklet" scope="prototype">
<property name="deleteRemoteFile" value="false"/>
<property name="fileNamePattern" value="${Job.subscription.filePattern}"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="localPath" value="${Job.subscription.sftp.localDirectory}"/>
<property name="mustMatch" value="true"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteSessionFactory" ref="sftpSessionFactory"/>
<property name="maxNumberDownload" value="1"/>
<property name="resolver">
<bean class="uk.co.and.batch.core.remote.FileNameResolvers.SFtpFileNameResolver" />
</property>
</bean>
<bean id="sftpSessionFactory" class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="ftp.co.uk"/>
<property name="port" value="22"/>
<property name="user" value=""/>
<property name="password" value=""/>
<property name="proxy" value="#{ ${proxyRef} }"/>
</bean>
</beans>
I'm failing to use Spring 4 together with Hibernate 4 in a standalone process (no container like Tomcat, WAS, ...).
How can I use Hibernate 4, Spring 4 and Spring Data repositories together in a standalone process?
However I configure Spring, I always get the same exception:
Caused by: java.lang.NullPointerException
at org.hibernate.engine.transaction.internal.jta.JtaStatusHelper.getStatus(JtaStatusHelper.java:76)
at org.hibernate.engine.transaction.internal.jta.JtaStatusHelper.isActive(JtaStatusHelper.java:118)
at org.hibernate.engine.transaction.internal.jta.CMTTransaction.join(CMTTransaction.java:149)
When googling for this, I get pointed to some information about hibernate.transaction.jta.platform; the documentation for Hibernate 4.3 is here: http://docs.jboss.org/hibernate/orm/4.3/devguide/en-US/html_single/#services-JtaPlatform
But the only option I see for my case would be org.hibernate.engine.transaction.jta.platform.internal.NoJtaPlatform, and this still leads to the same error.
Here is my Spring config:
<context:component-scan base-package="com.xx.yy" />
<jpa:repositories base-package="com.xx.zz.respositories"></jpa:repositories>
<bean name="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="com.mysql.jdbc.Driver" />
<property name="url" value="jdbc:mysql://localhost:3306/culture" />
<property name="username" value="root" />
<property name="password" value="" />
</bean>
<tx:annotation-driven transaction-manager="transactionManager" />
<bean class="org.springframework.orm.jpa.JpaTransactionManager" id="transactionManager">
<property name="entityManagerFactory" ref="entityManagerFactory" />
<property name="jpaDialect">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaDialect" />
</property>
</bean>
<bean id="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
<property name="jtaDataSource" ref="dataSource" />
<property name="packagesToScan" value="culture.matching.index.model" />
<property name="jpaVendorAdapter">
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
<property name="generateDdl" value="true" />
<property name="showSql" value="false" />
<property name="databasePlatform" value="org.hibernate.dialect.MySQLDialect" />
<property name="database" value="MYSQL" />
</bean>
</property>
<property name="jpaProperties">
<value>
hibernate.transaction.jta.platform=org.hibernate.engine.transaction.jta.platform.internal.NoJtaPlatform
</value>
</property>
</bean>
The answer by @geoand helped a lot: https://github.com/spring-projects/spring-boot/tree/master/spring-boot-samples/spring-boot-sample-simple
I therefore moved from XML to Java config.
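For completeness, here is a minimal sketch of the kind of Java config that works in a standalone process, based on the XML above. The package names, credentials and dialect are copied from that config; everything else is an assumption, not the poster's actual code. Note that it wires the plain dataSource property instead of jtaDataSource, so Hibernate uses resource-local transactions and no JTA platform is involved:
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

@Configuration
@ComponentScan("com.xx.yy")
@EnableJpaRepositories("com.xx.zz.respositories")
@EnableTransactionManagement
public class StandaloneJpaConfig {

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource ds = new DriverManagerDataSource();
        ds.setDriverClassName("com.mysql.jdbc.Driver");
        ds.setUrl("jdbc:mysql://localhost:3306/culture");
        ds.setUsername("root");
        ds.setPassword("");
        return ds;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
        HibernateJpaVendorAdapter adapter = new HibernateJpaVendorAdapter();
        adapter.setGenerateDdl(true);
        adapter.setDatabasePlatform("org.hibernate.dialect.MySQLDialect");

        LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
        // Plain (non-JTA) dataSource: resource-local transactions, no JTA platform needed.
        emf.setDataSource(dataSource());
        emf.setPackagesToScan("culture.matching.index.model");
        emf.setJpaVendorAdapter(adapter);
        return emf;
    }

    @Bean
    public PlatformTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
        return new JpaTransactionManager(entityManagerFactory);
    }
}
Bootstrapping is then just new AnnotationConfigApplicationContext(StandaloneJpaConfig.class) from the main method.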
We are working on a Spring-based web application where high availability is key to the business. Hibernate is the ORM and MySQL is the database. Our architecture forces us to have the following mechanism.
The Webapp first tries to connect to the primary MySQL server.
If that fails, it connects to the Secondary MySQL server, which is mostly out of sync with the data.
The webapp needs to know which MySQL Server it is connected to, since we want to notify the user when he is using the secondary server.
As soon as the connection to the primary is re-established, the connection has to be switched from secondary back to primary.
I am stuck at the very first phase: I am unable to find out how to direct Spring/Hibernate to use multiple DB servers.
Here is the current config file (removing the unwanted stuff):
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:aop="http://www.springframework.org/schema/aop"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:jee="http://www.springframework.org/schema/jee" xmlns:lang="http://www.springframework.org/schema/lang"
xmlns:p="http://www.springframework.org/schema/p" xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/jee http://www.springframework.org/schema/jee/spring-jee.xsd
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang.xsd
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd">
<context:annotation-config />
<context:component-scan base-package="com.smartshop" />
<bean
class="org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor" />
<bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource"
destroy-method="close">
<property name="driverClassName" value="com.mysql.jdbc.Driver" />
<property name="url" value="jdbc:mysql://localhost:3306/primarydb" />
<property name="username" value="username" />
<property name="password" value="password" />
<property name="maxIdle" value="10" />
<property name="maxActive" value="100" />
<property name="maxWait" value="10000" />
<property name="validationQuery" value="select 1" />
<property name="testOnBorrow" value="false" />
<property name="testWhileIdle" value="true" />
<property name="timeBetweenEvictionRunsMillis" value="1200000" />
<property name="minEvictableIdleTimeMillis" value="1800000" />
<property name="numTestsPerEvictionRun" value="5" />
<property name="defaultAutoCommit" value="false" />
</bean>
<bean id="sessionFactory"
class="org.springframework.orm.hibernate3.LocalSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="configLocation">
<value>/WEB-INF/hibernate.cfg.xml</value>
</property>
<property name="configurationClass">
<value>org.hibernate.cfg.AnnotationConfiguration</value>
</property>
<property name="hibernateProperties">
<props>
<prop key="hibernate.dialect">org.hibernate.dialect.MySQLDialect</prop>
<prop key="hibernate.show_sql">false</prop>
</props>
</property>
</bean>
<tx:annotation-driven />
<bean id="transactionManager"
class="org.springframework.orm.hibernate3.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
<bean
class="org.springframework.orm.hibernate3.support.OpenSessionInViewInterceptor"
name="openSessionInViewInterceptor">
<property name="sessionFactory" ref="sessionFactory"></property>
<property name="flushMode">
<bean
id="org.springframework.orm.hibernate3.HibernateAccessor.FLUSH_AUTO"
class="org.springframework.beans.factory.config.FieldRetrievingFactoryBean" />
</property>
</bean>
<bean id="handlerMapping"
class="org.springframework.web.servlet.mvc.annotation.DefaultAnnotationHandlerMapping">
<property name="interceptors">
<list>
<ref bean="localeChangeInterceptor" />
<ref bean="openSessionInViewInterceptor" />
</list>
</property>
</bean>
Is there a way to configure Spring to connect to a backup datasource when the primary datasource is inaccessible?
If you configure your datasource as a JNDI datasource, you can use the following configuration:
<bean id="dataSource"
class="org.springframework.jndi.JndiObjectFactoryBean">
<property name="jndiName" ref="datasourceJNDIName" />
<property name="defaultObject" ref="fallBackDataSource" />
</bean>
<!-- fall back datasource if JNDI look up of main datasource fails -->
<bean id="fallBackDataSource"
class="org.springframework.jndi.JndiObjectFactoryBean">
<property name="jndiName" ref="datasourceJNDIName-2" />
</bean>
This kind of trick has to be done on the MySQL side, not in the webapp. MySQL Cluster, for instance, can handle this.
More info here:
- http://www.mysql.com/products/cluster/
- http://en.wikipedia.org/wiki/MySQL_Cluster
UPDATE:
OK, so have a look at org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource. Build a custom implementation that overrides the methods getConnection() and getConnection(String username, String password). Surround them with a try/catch for SQLException and, if one occurs, choose the other datasource, as sketched below.
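A minimal sketch of that idea, assuming the two DataSource beans are registered with setTargetDataSources under the keys "PRIMARY" and "SECONDARY"; all names here are illustrative, not from the original configuration:
import java.sql.Connection;
import java.sql.SQLException;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class FailoverRoutingDataSource extends AbstractRoutingDataSource {

    // Flag read by determineCurrentLookupKey(); volatile because connections are
    // requested from many threads. A per-call decision would avoid races, but
    // this keeps the sketch short.
    private volatile boolean usePrimary = true;

    @Override
    protected Object determineCurrentLookupKey() {
        return usePrimary ? "PRIMARY" : "SECONDARY";
    }

    @Override
    public Connection getConnection() throws SQLException {
        try {
            usePrimary = true;               // always try the primary first
            return super.getConnection();
        } catch (SQLException ex) {
            usePrimary = false;              // primary unreachable: fall back
            return super.getConnection();
        }
    }

    @Override
    public Connection getConnection(String username, String password) throws SQLException {
        try {
            usePrimary = true;
            return super.getConnection(username, password);
        } catch (SQLException ex) {
            usePrimary = false;
            return super.getConnection(username, password);
        }
    }

    // Lets the web layer tell the user that the secondary server is in use.
    public boolean isUsingSecondary() {
        return !usePrimary;
    }
}
The sessionFactory then points at this routing bean instead of the plain dataSource, and the isUsingSecondary() flag can drive the user notification mentioned in the question.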