Attempt to process next row failed. Closed Resultset error - java

I am getting the error below while executing a batch job. It does not fail immediately, but after about half an hour.
2016-02-17 15:24:25,106 ERROR [AbstractStep ][poolTaskExecutor-2 ] Encountered an error executing step processSubscriptionFile-stepTwo in job subscriptionJob
org.springframework.jdbc.UncategorizedSQLException: Attempt to process next row failed; uncategorized SQLException for SQL [
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = 77317301 order by PERSON_ID
]; SQL state [99999]; error code [17010]; Closed Resultset: next; nested exception is java.sql.SQLException: Closed Resultset: next
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:84)
at
My batch job file is below. In the first step I read data from a file and insert it into an intermediate table; this step is partitioned and runs on multiple threads. In the second step I read the distinct customer IDs from this intermediate table and pass them to the writer. It fails about halfway through this second step. There are 70,000 records in the intermediate table and 10,000 distinct customer IDs. The second step does not use partitioning or multiple threads. Please help.
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"
default-autowire="byName">
<bean id="SubscriptionJob-jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="uk.co.batch.datamigration.JobLaunchDetails"/>
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="SubscriptionJob" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
<!-- Job -->
<batch:job id="SubscriptionJob" restartable="false">
<batch:step id="copySubscriptionFile" next="processSubscriptionFile">
<tasklet ref="copySubscriptionFileLocally" />
</batch:step>
<batch:step id="processSubscriptionFile" next="processSubscriptionFile-stepTwo">
<partition step="processSubscriptionFile-stepOne" partitioner="SubscriptionPartitioner">
<handler grid-size="20" task-executor="SubscriptionTaskExecutor"/>
</partition>
</batch:step>
<batch:step id="processSubscriptionFile-stepTwo">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepTwoReader" writer="SubscriptionItemStepTwoWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="SubscriptionJobListener"/>
<batch:listener ref="SubscriptionJobNotifier"/>
</batch:listeners>
</batch:job>
<batch:step id="processSubscriptionFile-stepOne">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepOneReader" writer="SubscriptionItemStepOneWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<bean id ="SubscriptionJobNotifier" class="uk.co.and.batch.status.JobExecutionNotifier">
<property name="snsEventType" value="SUBSCRIPTION_JOB_NOTIFICATION"/>
<property name="onlyNotifyFailures" value="true"/>
</bean>
<bean id="SubscriptionPartitioner" class="uk.co.batch.datamigration.FlatFilePartitioner" scope="step">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
</bean>
<bean id="SubscriptionItemStepOneReader" scope="step" autowire-candidate="false" parent="SubscriptionItemStepOneReaderParent">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="startAt" value="#{stepExecutionContext['startAt']}"/>
<property name="itemsCount" value="#{stepExecutionContext['itemsCount']}"/>
</bean>
<bean id="SubscriptionItemStepOneReaderParent" abstract="true"
class="uk.co.batch.datamigration.MultiThreadedFlatFileItemReader">
<property name="linesToSkip" value="1"/>
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="^"/>
<property name="names">
<list >
<value>city_name</value>
<value>country_name</value>
<value>publication_name</value>
<value>subscription_ref_code</value>
<value>subscription_state</value>
<value>unsubscribed</value>
<value>city_id</value>
<value>person_id</value>
<value>subscription_created_at</value>
<value>subscription_updated_at</value>
<value>end_value</value>
</list>
</property>
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="SubscriptionVO"/>
</bean>
</property>
</bean>
</property>
<property name="recordSeparatorPolicy">
<bean class="org.springframework.batch.item.file.separator.DefaultRecordSeparatorPolicy"/>
</property>
</bean>
<bean id="SubscriptionVO" class="uk.co.batch.datamigration.subscription.SubscriptionVO" scope="prototype"/>
<!-- TODO:Reduce number of columns to improve performance -->
<bean id="SubscriptionItemStepOneWriter"
class="org.springframework.batch.item.database.JdbcBatchItemWriter" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql">
<value>
<![CDATA[
insert into CUST_SUB_INTER(ID,CITY_NAME,COUNTRY_NAME,PUBLICATION_NAME,SUBSCRIPTION_REF_CODE,SUBSCRIPTION_STATE,
UNSUBSCRIBED,CITY_ID,PERSON_ID,SUBSCRIPTION_CREATED_AT,SUBSCRIPTION_UPDATED_AT,END_VALUE,JOB_ID,
MIGRATION_FILE_NAME,RECORD_INSERT_DATE)
values (SEQ_CUST_SUB_ERROR.NEXTVAL, :city_name, :country_name, :publication_name,
:subscription_ref_code, :subscription_state, :unsubscribed,
:city_id, :person_id, :subscription_created_at, :subscription_updated_at, :end_value,
#{jobExecutionContext[jobId]}, '#{jobExecutionContext[tempSubscriptionFile]}', SYSTIMESTAMP)
]]>
</value>
</property>
<property name="itemSqlParameterSourceProvider">
<bean class="org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider" />
</property>
</bean>
<!-- scope step is critical here-->
<bean id="SubscriptionItemStepTwoReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource"/>
<property name="sql">
<value>
<![CDATA[
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = #{jobExecutionContext.jobId} order by PERSON_ID
]]>
</value>
</property>
<property name="rowMapper">
<bean class="org.springframework.jdbc.core.SingleColumnRowMapper" p:requiredType="java.lang.Long"/>
</property>
</bean>
<bean id="SubscriptionItemStepTwoWriter" class="uk.co.batch.datamigration.subscription.SubscriptionItemStepTwoWriter" scope="step">
<property name="fileName" value="#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="jobId" value="#{jobExecutionContext.jobId}" />
<property name="errorSaving" value="${Job.errorSaving}"/>
</bean>
<bean id="SubscriptionTaskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="10"/>
</bean>
<bean id="SubscriptionJobListener" class="uk.co.batch.datamigration.JobListener">
<property name="sessionFactory" ref="sftpSessionFactory"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteExtension" value="${Job.sftp.remoteExtension}"/>
<property name="renameRemoteFile" value="true"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="batchAdministratorUserId" value="${Job.batchAdministratorUserId}"/>
</bean>
<bean id="copySubscriptionFileLocally" class="uk.co.and.batch.core.LocalCopyTasklet" scope="prototype">
<property name="deleteRemoteFile" value="false"/>
<property name="fileNamePattern" value="${Job.subscription.filePattern}"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="localPath" value="${Job.subscription.sftp.localDirectory}"/>
<property name="mustMatch" value="true"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteSessionFactory" ref="sftpSessionFactory"/>
<property name="maxNumberDownload" value="1"/>
<property name="resolver">
<bean class="uk.co.and.batch.core.remote.FileNameResolvers.SFtpFileNameResolver" />
</property>
</bean>
<bean id="sftpSessionFactory" class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="ftp.co.uk"/>
<property name="port" value="22"/>
<property name="user" value=""/>
<property name="password" value=""/>
<property name="proxy" value="#{ ${proxyRef} }"/>
</bean>
</beans>
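One mitigation that is sometimes suggested for a cursor being closed mid-step (an assumption on my part, not a verified fix for this particular job) is to let the JdbcCursorItemReader keep its own connection open across the chunk transaction commits, by wrapping the dataSource in an ExtendedConnectionDataSourceProxy and enabling useSharedExtendedConnection. A minimal sketch:
<!-- Sketch only: the proxy keeps the cursor's connection from being closed/returned to the pool between chunks -->
<bean id="cursorDataSource"
      class="org.springframework.batch.item.database.ExtendedConnectionDataSourceProxy">
    <property name="dataSource" ref="dataSource"/>
</bean>
<bean id="SubscriptionItemStepTwoReader"
      class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
    <property name="dataSource" ref="cursorDataSource"/>
    <property name="useSharedExtendedConnection" value="true"/>
    <!-- sql and rowMapper exactly as in the original SubscriptionItemStepTwoReader definition above -->
</bean>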

Related

Spring batch 3600 partitions long pause before starting

I have the following Spring Batch job with a partitioned step that creates 3600 partitions. I use a ThreadPoolTaskExecutor with a max pool size of 100 and a queue capacity of 100 (although that seems to make no difference for speed). I'm using VisualVM to monitor the threads, and I notice the taskExecutor threads don't start until more than 5 minutes after starting the job.
Oddly enough, if I limit the number of partitions to 100, the threads start fairly quickly and finish in about a minute.
Another issue I notice is that there never seems to be more than one database connection, as seen in the VisualVM thread visualization.
Can someone please review my batch job below and tell me if I am missing something that would limit the number of database connections to 1? Also, why would adding more partitions affect the performance if my ThreadPoolTaskExecutor parameters don't change? Shouldn't the partitions just sit in a queue until there is a thread able to service them?
--- Spring batch job ---
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:util="http://www.springframework.org/schema/util"
xmlns:lang="http://www.springframework.org/schema/lang"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.0.xsd">
<context:property-placeholder location="${ext.properties.dataManipulation.Properties}"/>
<import resource="${ext.properties.dataManipulation.Connection}"/>
<import resource="flatFileLineProperties.xml"/>
<!-- JobRepository and JobLauncher are configuration/setup classes -->
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean"/>
<bean id="transactionManager"
class="org.springframework.batch.support.transaction.ResourcelessTransactionManager"/>
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository"/>
</bean>
<util:map id="keys" map-class="java.util.HashMap">
<entry key="SAILING_ID" value="ASCENDING" value-type="org.springframework.batch.item.database.Order"/>
<entry key="RES_ID" value="ASCENDING" value-type="org.springframework.batch.item.database.Order" />
</util:map>
<!-- Here is my partitioned step -->
<bean id="reservationsItemReader" class="org.springframework.batch.item.database.JdbcPagingItemReader" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="queryProvider">
<bean class="org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="selectClause">
<value>
<![CDATA[
GUEST_ID,
FIRST_NAME,
LAST_NAME,
TITLE,
HOUSEHOLD_NAME,
SAILING_ID,
RES_ID
]]>
</value>
</property>
<property name="fromClause" value="FROM RESERVATION "/>
<property name="whereClause" >
<value>
<![CDATA[ AND SAIL_ID = :sailId
]]>
</value>
</property>
<!--<property name="sortKey" value="SAILING_ID" />-->
<property name="sortKeys" ref="keys"/>
<!--<property name="sortKeys" ref="sortKeys"/>-->
</bean>
</property>
<property name="parameterValues">
<map>
<!--<entry key="shipCode" value="#{stepExecutionContext[shipCode]}" />-->
<entry key="sailId" value="#{stepExecutionContext[sailId]}" />
</map>
</property>
<!--property name="pageSize" value="500000" /-->
<property name="pageSize" value="40000" />
<property name="rowMapper">
<bean class="com.ncl.endeca.mapper.ColumnToHashMapper" />
</property>
</bean>
<bean id="sortKeys" class="java.util.HashMap" scope="prototype" >
<constructor-arg>
<map key-type="java.lang.String" value-type="org.springframework.batch.item.database.Order">
<entry key="SAILING_ID" value="ASCENDING" />
<entry key="RES_ID" value="ASCENDING" />
</map>
</constructor-arg>
</bean>
<util:list id="client_fields" value-type="java.lang.String">
<value>FIRST_NAME</value>
<value>LAST_NAME</value>
<value>TITLE</value>
<value>HOUSEHOLD_NAME</value>
</util:list>
<bean id="reservationsItemWriter" class="com.ncl.endeca.writer.ReservationWriter" scope="step">
<property name="guestFields" ref="client_fields" />
<property name="outPrefix" value="${file.out.prefix}" />
<property name="shipCode" value="#{stepExecutionContext[shipCode]}" />
<property name="sailId" value="#{stepExecutionContext[sailId]}" />
<property name="soldOutSailings" ref="soldOutSailingsList" />
</bean>
<bean id="yearsAgo" class="java.lang.Integer">
<constructor-arg>
<value>${yearsAgo}</value>
</constructor-arg>
</bean>
<bean id="yearsAhead" class="java.lang.Integer">
<constructor-arg>
<value>${yearsAhead}</value>
</constructor-arg>
</bean>
<bean id="resPartitioner" class="com.ncl.endeca.partition.ReservationPartitioner">
<property name="yearsAgo" ref="yearsAgo" />
<property name="yearsAhead" ref="yearsAhead" />
<property name="batchLimit" value="${batch.limit}" />
<property name="dataSource" ref="dataSource"/>
</bean>
<bean id="taskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="${batch.corePoolSize}" />
<property name="maxPoolSize" value="${batch.maxPoolSize}" />
<property name="queueCapacity" value="${batch.queueCapacity}" />
</bean>
<!--<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor"/>-->
<!-- each thread will run this job, with different stepExecutionContext values. -->
<step id="slave" xmlns="http://www.springframework.org/schema/batch" >
<flow parent="readReservations"/>
</step>
<!--<bean id="countrySpecificCompletionPolicy" class="org.springframework.batch.core.resource.StepExecutionSimpleCompletionPolicy">-->
<!--<property name="keyName" value="sailId"/>-->
<!--</bean>-->
<batch:flow id="readReservations">
<batch:step id="reservations" xmlns="http://www.springframework.org/schema/batch" >
<tasklet throttle-limit="${batch.corePoolSize}">
<chunk reader="reservationsItemReader" writer="reservationsItemWriter" commit-interval="50000" />
</tasklet>
</batch:step>
</batch:flow>
<!-- Actual Job -->
<batch:job id="dataManipulationJob">
<batch:step id="masterStep">
<batch:partition step="slave" partitioner="resPartitioner">
<batch:handler grid-size="100" task-executor="taskExecutor" />
</batch:partition>
</batch:step>
</batch:job>
I have tried BasicDataSource and HikariCP connection pools, but the pool sizes have no effect when I monitor with VisualVM.
---- connection.xml ----
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:util="http://www.springframework.org/schema/util"
xmlns:lang="http://www.springframework.org/schema/lang"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.0.xsd">
<bean id="hikariConfig" class="com.zaxxer.hikari.HikariConfig">
<property name="poolName" value="springHikariCP" />
<property name="connectionTestQuery" value="SELECT 10 from dual" />
<property name="dataSourceClassName" value="${hibernate.dataSourceClassName}" />
<property name="maximumPoolSize" value="${batch.maxPoolSize}" />
<property name="idleTimeout" value="${hibernate.hikari.idleTimeout}" />
<property name="dataSourceProperties">
<props>
<prop key="url">${dataSource.url}</prop>
<prop key="user">${dataSource.username}</prop>
<prop key="password">${dataSource.password}</prop>
</props>
</property>
</bean>
<!-- HikariCP configuration -->
<!--<bean id="dataSource" class="com.zaxxer.hikari.HikariDataSource" destroy-method="close">-->
<!--<constructor-arg ref="hikariConfig" />-->
<!--</bean>-->
<!--<bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close" scope="step">-->
<!--<property name="driverClassName" value="${hibernate.dataSourceClassName}" />-->
<!--<property name="url" value="${dataSource.url}" />-->
<!--<property name="username" value="${dataSource.username}" />-->
<!--<property name="password" value="${dataSource.password}" />-->
<!--<property name="testWhileIdle" value="false"/>-->
<!--<property name="maxActive" value="${batch.corePoolSize}"/>-->
<!--</bean>-->
<!-- connect to database -->
<bean id="dataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource" >
<property name="driverClassName" value="oracle.jdbc.OracleDriver" />
<property name="url" value="jdbc:oracle:thin:#******" />
<property name="username" value="****" />
<property name="password" value="****" />
</bean>
The time to create partitions depends on the performance of your ReservationPartitioner as well as the number of partitions. Creating 3600 partitions means creating 3600 StepExecution/ExecutionContext objects and persisting them in the corresponding tables. This can take time for such a high number of partitions.
Regarding the database connection, you are using the MapJobRepositoryFactoryBean with a ResourcelessTransactionManager, so there are no interactions with the database for Spring Batch meta-data. The only component that interacts with the database, according to your configuration, is the JdbcPagingItemReader (I don't know what the type of your ReservationWriter is), so it is not surprising to see a single database connection.
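For comparison (a sketch only, assuming you actually want the meta-data persisted and the pool exercised), a database-backed job repository wired to the same dataSource is what would make Spring Batch meta-data use additional connections as well:
<bean id="jobRepository"
      class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean">
    <!-- meta-data tables (BATCH_JOB_EXECUTION etc.) are then read and written through this dataSource -->
    <property name="dataSource" ref="dataSource"/>
    <property name="transactionManager" ref="transactionManager"/>
</bean>
<bean id="transactionManager"
      class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
    <property name="dataSource" ref="dataSource"/>
</bean>
This replaces the in-memory MapJobRepositoryFactoryBean/ResourcelessTransactionManager pair shown above and requires the Spring Batch meta-data schema to exist in the target database.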

Apache Ignite client and server configuration for production

I have set up the server configuration below for Ignite.
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
">
<description>Main Spring file for ignite configuration.</description>
<bean id="cacheIgniteBean" class="org.apache.ignite.IgniteSpringBean">
<property name="configuration">
<bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<property name="dataRegionConfigurations">
<list>
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="item_cache_Region"/>
<property name="initialSize" value="${ignite.config.item.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.item.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="location_cache_Region"/>
<property name="initialSize" value="${ignite.config.location.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.location.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
<property name="persistenceEnabled"
value="${ignite.config.cache.location.native.persistence.enabled}"/>
</bean>
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="order_cache_Region"/>
<property name="initialSize" value="${ignite.config.order.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.order.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
</list>
</property>
</bean>
</property>
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="item"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="cacheStoreFactory">
<bean class="javax.cache.configuration.FactoryBuilder" factory-method="factoryOf">
<constructor-arg value="com.tgt.gom.cacheserver.store.ItemCacheStore"/>
</bean>
</property>
<property name="readThrough" value="${ignite.config.cache.item.readThrough}"/>
<property name="writeThrough" value="${ignite.config.cache.item.writeThrough}"/>
<property name="writeBehindEnabled" value="${ignite.config.cache.item.writeBehindEnabled}"/>
<property name="writeBehindFlushSize"
value="${ignite.config.cache.item.writeBehindFlushSize}"/>
<property name="writeBehindFlushFrequency"
value="${ignite.config.cache.item.writeBehindFlushFrequency}"/>
<property name="writeBehindFlushThreadCount"
value="${ignite.config.cache.item.writeBehindFlushThreadCount}"/>
<property name="writeBehindBatchSize"
value="${ignite.config.cache.item.writeBehindBatchSize}"/>
<property name="dataRegionName" value="item_cache_Region"/>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="location"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="dataRegionName" value="location_cache_Region"/>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="order"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="eagerTtl" value="true"/>
<property name="dataRegionName" value="order_cache_Region"/>
</bean>
</list>
</property>
<property name="failureDetectionTimeout" value="60000"/>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>127.0.0.1:47500..47505</value>
<value>10.60.158.197:47500..47505</value>
<value>10.60.158.196:47500..47505</value>
<value>10.60.158.20:47500..47505</value>
<value>10.60.158.2:47500..47505</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</beans>
And below is the client configuration.
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
">
<description>Main Spring file for ignite configuration.</description>
<bean id="cacheIgniteBean" class="org.apache.ignite.IgniteSpringBean">
<property name="configuration">
<bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<property name="clientMode" value="true"/>
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<property name="dataRegionConfigurations">
<list>
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="order_cache_Region"/>
<property name="initialSize" value="${ignite.config.order.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.order.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
</list>
</property>
</bean>
</property>
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="order"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="eagerTtl" value="true"/>
<property name="dataRegionName" value="order_cache_Region"/>
</bean>
</list>
</property>
<property name="clientFailureDetectionTimeout" value="50000"/>
<property name="failureDetectionTimeout" value="50000"/>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="networkTimeout" value="20000"/>
<property name="forceServerMode" value="${ignite.client.force.server.mode}"/>
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>10.60.158.197:47500..47505</value>
<value>10.60.158.196:47500..47505</value>
<value>10.60.158.20:47500..47505</value>
<value>10.60.158.2:47500..47505</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</beans>
I have 4 server nodes and 2 client nodes. Am I missing anything in the configuration which may cause performance issues? I sometimes get the warnings below on the client and server boxes.
Server node warnings:
e41fd3-846c-4086-a7bf-486616594368]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-2-#27","level":"INFO","level_value":20000}
{"#timestamp":"2018-02-08T23:36:22.177-05:00","#version":1,"message":"Accepted incoming communication connection [locAddr=/192.168.208.5:47100, rmtAddr=/192.168.208.10:58392]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-3-#28","level":"INFO","level_value":20000}
{"#timestamp":"2018-02-08T23:36:22.177-05:00","#version":1,"message":"Received incoming connection when already connected to this node, rejecting [locNode=434ef9a9-eb32-4096-8f80-843486a89137, rmtNode=65e41fd3-846c-4086-a7bf-486616594368]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-3-#28","level":"INFO","level_value":20000}
Client node warnings:
{"#timestamp":"2018-02-08T23:27:30.904-05:00","#version":1,"message":"Found long running cache future [startTime=23:25:45.075, curTime=23:27:30.898, fut=GridPartitionedSingleGetFuture [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], key=UserKeyCacheObjectImpl [part=239, val=20170627035520067938, hasValBytes=true], readThrough=true, forcePrimary=false, futId=a63a0367161-9c6368e5-8fa4-4550-abf6-02f8ef50125e, trackable=true, subjId=65e41fd3-846c-4086-a7bf-486616594368, taskName=null, deserializeBinary=true, skipVals=false, expiryPlc=CacheExpiryPolicy [], canRemap=true, needVer=false, keepCacheObjects=false, recovery=false, node=TcpDiscoveryNode [id=434ef9a9-eb32-4096-8f80-843486a89137, addrs=[127.0.0.1, 192.168.208.5], sockAddrs=[/192.168.208.5:47500, /127.0.0.1:47500], discPort=47500, order=1, intOrder=1, lastExchangeTime=1518106106491, loc=false, ver=2.3.0#20171028-sha1:8add7fd5, isClient=false]]]","logger_name":"org.apache.ignite.internal.diagnostic","thread_name":"grid-timeout-worker-#23","level":"WARN","level_value":30000}
org.apache.ignite.internal.processors.cache.CacheStoppedException: Failed to perform cache operation (cache is stopped):
Can anyone please suggest anything I am missing in the configuration for a production environment?
Also, any idea about the following metrics log?
Metrics for local node (to disable set 'metricsLogFrequency' to 0)\n ^-- Node [id=1c4d9a3e, uptime=15:49:04.761]\n ^-- H/N/C [hosts=4, nodes=4, CPUs=16]\n ^-- CPU [cur=0.17%, avg=0.27%, GC=0%]\n ^-- PageMemory [pages=1624]\n ^-- Heap [used=510MB, free=91.55%, comm=6036MB]\n ^-- Non heap [used=117MB, free=-1%, comm=119MB]\n ^-- Public thread pool [active=0, idle=0, qSize=0]\n ^-- System thread pool [active=0, idle=6, qSize=0]\n ^-- Outbound messages queue [size=0]","logger_name":"org.apache.ignite.internal.IgniteKernal","thread_name":"grid-timeout-worker-#23","level":"INFO","level_value":20000}
Here it says that non-heap memory free is -1%. In the data region configuration I have given a minimum of 1 GB and a maximum of 2 GB of memory. Any idea how to take care of this non-heap memory?
This looks like this issue: https://issues.apache.org/jira/browse/IGNITE-6818
It's fixed in 2.4, which will be released soon. I would recommend monitoring the Ignite dev list for further announcements. You can also build from master to check whether it resolves the issue or not.
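As a side note (an assumption on my part, unrelated to the fix above): the metrics line itself says the periodic log can be disabled by setting metricsLogFrequency to 0, which in the XML above would be one extra property on the IgniteConfiguration bean:
<bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
    <!-- 0 disables the periodic "Metrics for local node" log output -->
    <property name="metricsLogFrequency" value="0"/>
    <!-- existing dataStorageConfiguration, cacheConfiguration and discoverySpi properties stay as they are -->
</bean>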

Dynamically replace a placeholder value in the config XML file in Spring Batch

I am using Spring Batch version 2. I am reading data from a database using JdbcCursorItemReader. I have successfully fetched the data and also written it to a file.
Below is the itemReader bean defined in the Job.xml file:
<bean id="itemReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader"
scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql"
value="select u.ID, u.USER_LOGIN, u.PASSWORD, u.AGE from USERS u" />
<property name="rowMapper">
<bean class="com.example.UserRowMapper" />
</property>
</bean>
But the issue is that my query is quite big, so I want to keep it out of the XML file and load it from another file or a properties file (.properties, YAML or XML).
So that I can write the XML as below:
<bean id="itemReader" class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql" value="$sql_query" />
<property name="rowMapper">
<bean class="com.example.UserRowMapper" />
</property>
</bean>
What is the best way to achieve this?
<bean id="myProperties" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="locations">
<list>
<value>path1.properties</value>
<value>path2.properties</value>
.....
</list>
</property>
<property name="ignoreUnresolvablePlaceholders" value="false"/>
</bean>
...
<bean id="itemReader" class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql" value="${sql_query}" />
<property name="rowMapper">
<bean class="com.example.UserRowMapper" />
</property>
</bean>
path1.properties:
sql_query=value
PropertySourcesPlaceholderConfigurer is preferred in Spring 3.1 and higher, instead of PropertyPlaceholderConfigurer.
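For example, a sketch of the newer configurer (assuming Spring 3.1+ is on the classpath):
<bean class="org.springframework.context.support.PropertySourcesPlaceholderConfigurer">
    <property name="locations">
        <list>
            <value>path1.properties</value>
            <value>path2.properties</value>
        </list>
    </property>
    <property name="ignoreUnresolvablePlaceholders" value="false"/>
</bean>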
You can also add the SQL to the job execution context by using a job listener before the step.
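A sketch of that approach (the listener class name here is hypothetical; in its beforeJob() or beforeStep() callback it would put the query string into the execution context under the key 'sql_query'):
<!-- Hypothetical listener bean that stores the query in the job execution context -->
<bean id="sqlQueryListener" class="com.example.SqlQueryContextListener"/>
<bean id="itemReader" class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
    <property name="dataSource" ref="dataSource"/>
    <!-- late-bound when the step starts, because the bean is step-scoped -->
    <property name="sql" value="#{jobExecutionContext['sql_query']}"/>
    <property name="rowMapper">
        <bean class="com.example.UserRowMapper"/>
    </property>
</bean>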

Spring Batch: Fill n files but the next step sees them empty

I implemented a step in Spring Batch that reads records from the database and generates output files.
The step is partitioned so that each slave step generates a different file.
Then I implemented a step which is executed after the slaves; what it does is read each of these
generated files and consolidate them into a single file (merge).
The problem I am having is that when the consolidator step runs, the input files are empty (probably because
no flush was performed), so I tried configuring forceSync = true and transactional = false on the slave writer, but without effect.
When the job finishes, the files generated by the slaves contain data, and the merge file is empty.
The problem, then, is that the input files are empty at the time the consolidator step executes.
Can you help me? Do you need more information?
(sorry for my English)
thanks
<batch:job id="interfacesJob" xmlns="">
<batch:step id="step.1" parent="readerParent" next="step.2"/>
<batch:step id="step.2" parent="merge"/>
</batch:job>
<batch:step id="readerParent">
<batch:partition step="slave" partitioner="partitioner">
<batch:handler grid-size="50" task-executor="poolTaskExecutor" />
</batch:partition>
</batch:step>
<batch:step id="slave">
<batch:tasklet
transaction-manager="transactionManager">
<batch:chunk
reader="dummyReader"
processor="dummyProcessor"
writer="dummyWriter"
commit-interval="1"
skip-limit="50"/>
</batch:tasklet>
</batch:step>
<bean id="dummyWriter"
class="org.springframework.batch.item.file.FlatFileItemWriter"
scope="step" >
<property name="resource" value="file:operations_#{stepExecutionContext[fromId]}.txt" />
<property name="forceSync" value="true" />
<property name="transactional" value="false" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," /> <!-- default -->
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="id,field1,field2" />
</bean>
</property>
</bean>
</property>
</bean>
<batch:step id="merge">
<tasklet>
<chunk reader="mergeReader" writer="mergeWriter" commit-interval="1" />
</tasklet>
</batch:step>
<bean id="mergeReader"
class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="resources" value="file:operations_*.txt" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="," />
<property name="names" value="id,field1,field2" />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="dummyPrototypeBean" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="mergeWriter"
class="org.springframework.batch.item.file.FlatFileItemWriter">
<property name="resource" value="file:final.txt" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," />
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="id,field1,field2" />
</bean>
</property>
</bean>
</property>
</bean>
I tried a MultiResourceItemReader for the mergeReader, but I still have the same problem:
<bean id="mergeReader"
class="org.springframework.batch.item.file.MultiResourceItemReader">
<property name="resources" value="file:operations_*.txt" />
<property name="delegate" ref="mergeReaderSpecific" />
</bean>
<bean id="mergeReaderSpecific"
class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="," />
<property name="names" value="id,field1,field2" />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="dummyPrototypeBean" />
</bean>
</property>
</bean>
</property>
</bean>
You probably have to use a MultiResourceItemReader as the mergeReader, with a FlatFileItemReader as its delegate.
The FlatFileItemReader.resource property is intended to be used with a single resource, as opposed to
MultiResourceItemReader.resources (plural).
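One more thing worth checking (an assumption on my part, not something stated above): with a singleton mergeReader, the file:operations_*.txt pattern is resolved when the application context starts, i.e. before the slave steps have written any files. Making the reader step-scoped defers the wildcard resolution to the moment the merge step actually runs, for example:
<bean id="mergeReader"
      class="org.springframework.batch.item.file.MultiResourceItemReader" scope="step">
    <!-- resolved when the merge step starts, after the slaves have written their files -->
    <property name="resources" value="file:operations_*.txt"/>
    <property name="delegate" ref="mergeReaderSpecific"/>
</bean>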

Meta tables for Spring Batch not created by Hibernate4

EDIT: Although I think Hibernate should be able to do this automatically, I have a DDL script containing the structure I need. If I can run this script so that the tables are created automatically, I'm happy for now... see this question
My apologies for the lengthy post. My question might be quite simple, but being a Hibernate and Spring noob, this has been bugging me the whole morning...
I'm using Spring Batch. When running an import job, Spring wants to store meta-data about the job execution in a table named BATCH_JOB. However, this table is not created by Hibernate.
I run the job by:
public static void main(String[] args) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
ApplicationContext context = new ClassPathXmlApplicationContext("bean_configuration.xml");
SimpleJobLauncher launcher = (SimpleJobLauncher) context.getBean("jobLauncher");
FlowJob job = (FlowJob) context.getBean("importIVSJob");
launcher.run(job, new JobParameters());
}
My hibernate properties file:
#hibernate.c3p0.min_size=5
#hibernate.c3p0.max_size=20
#hibernate.c3p0.timeout=1800
#hibernate.c3p0.max_statements=50
hibernate.dialect=org.hibernate.dialect.HSQLDialect
hibernate.show_sql=true
hibernate.hbm2ddl.auto=create
and the xml config:
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/jdbc
http://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-2.5.xsd
http://www.springframework.org/schema/batch
http://www.springframework.org/schema/batch/spring-batch-2.1.xsd">
<batch:job job-repository="jobRepository" id="importIVSJob">
<batch:step id="step1">
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="csvFileReader" writer="itemWriter"
commit-interval="5" />
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="csvFileReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="resource" ref="inputResource" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer"
p:delimiter=";"
p:names="objectcode, event, beginTijd, duurEvent, kolkNummer, deelkolkNummer, vaarrichting, eniNummer, aantalSchepen, scheepType, vaartType, subTypeVaart, rwsKlasseHoofgroep, rwsKlasseSubgroep, cemtKlasse, laadvermogen, dwtLaadvermogen, scheepslengte, scheepsbreedte, scheepsdiepgang, scheepshoogte, vlagCode, beladingsCode, cargoGewicht, seinVoeringKegel, vrachtAanBoord, aantalContainers, aantalTEU_containers, reisId, toerbeurt, invaartGroen, uitvaartGroen" />
</property>
<property name="fieldSetMapper">
<bean
class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper"
p:targetType="nl.triopsys.styx.importeer.IVS.IvsBericht" />
</property>
</bean>
</property>
</bean>
<bean id="itemWriter" class="org.springframework.batch.item.file.FlatFileItemWriter">
<property name="resource" ref="outputResource" />
<property name="lineAggregator">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," />
<property name="fieldExtractor">
<bean
class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names"
value="objectcode, event, beginTijd, duurEvent, kolkNummer, deelkolkNummer, vaarrichting, eniNummer, aantalSchepen, scheepType, vaartType, subTypeVaart, rwsKlasseHoofgroep, rwsKlasseSubgroep, cemtKlasse, laadvermogen, dwtLaadvermogen, scheepslengte, scheepsbreedte, scheepsdiepgang, scheepshoogte, vlagCode, beladingsCode, cargoGewicht, seinVoeringKegel, vrachtAanBoord, aantalContainers, aantalTEU_containers, reisId, toerbeurt, invaartGroen, uitvaartGroen" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="inputResource" class="org.springframework.core.io.FileSystemResource">
<constructor-arg index="0" value="file:C:\Users\jgoddijn\testdata\IVS90.csv" />
</bean>
<bean id="outputResource" class="org.springframework.core.io.FileSystemResource">
<constructor-arg index="0" value="C:\Users\jgoddijn\testdata\output.csv" />
</bean>
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"
p:location="batch.properties" p:ignoreUnresolvablePlaceholders="true" />
<bean id="dataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="org.hsqldb.jdbcDriver" />
<property name="url"
value="jdbc:hsqldb:file:/D:/DMVV/HSQLDB;shutdown=true;hsqldb.write_delay=false" />
<property name="username" value="sa" />
<property name="password" value="" />
</bean>
<bean id="transactionManager"
class="org.springframework.orm.hibernate4.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
<bean id="sessionFactory"
class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="annotatedClasses">
<list>
<value>nl.triopsys.styx.importeer.IVS.IvsBericht</value>
<value>nl.triopsys.styx.domain.Vaartuig</value>
<value>nl.triopsys.styx.domain.AIS.MMSI</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.Scheepsnummer</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.EniNummer</value>
<value>nl.triopsys.styx.domain.IVS90.valuetype.ImoNummer</value>
</list>
</property>
<property name="hibernateProperties">
<bean
class="org.springframework.beans.factory.config.PropertiesFactoryBean">
<property name="location" value="batch.properties" />
</bean>
</property>
</bean>
<bean id="jobRegistry"
class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher"
p:jobRepository-ref="jobRepository" />
<bean id="jobRegistryBeanPostProcessor"
class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor"
p:jobRegistry-ref="jobRegistry" />
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean"
p:dataSource-ref="dataSource" p:transactionManager-ref="transactionManager" />
</beans>
and the error I get:
Exception in thread "main" org.hibernate.exception.SQLGrammarException: user lacks privilege or object not found: BATCH_JOB_INSTANCE
at org.hibernate.exception.internal.SQLExceptionTypeDelegate.convert(SQLExceptionTypeDelegate.java:83)
at org.hibernate.exception.internal.StandardSQLExceptionConverter.convert(StandardSQLExceptionConverter.java:49)
at org.hibernate.engine.jdbc.spi.SqlExceptionHelper.convert(SqlExceptionHelper.java:125)
at org.hibernate.engine.jdbc.spi.SqlExceptionHelper.convert(SqlExceptionHelper.java:110)
at org.hibernate.engine.jdbc.internal.proxy.ConnectionProxyHandler.continueInvocation(ConnectionProxyHandler.java:146)
at org.hibernate.engine.jdbc.internal.proxy.AbstractProxyHandler.invoke(AbstractProxyHandler.java:81)
at $Proxy20.prepareStatement(Unknown Source)
at org.springframework.jdbc.core.JdbcTemplate$SimplePreparedStatementCreator.createPreparedStatement(JdbcTemplate.java:1436)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:581)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:637)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:666)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:674)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:714)
at org.springframework.jdbc.core.simple.SimpleJdbcTemplate.query(SimpleJdbcTemplate.java:204)
at org.springframework.jdbc.core.simple.SimpleJdbcTemplate.query(SimpleJdbcTemplate.java:209)
at org.springframework.batch.core.repository.dao.JdbcJobInstanceDao.getJobInstance(JdbcJobInstanceDao.java:221)
at org.springframework.batch.core.repository.support.SimpleJobRepository.getLastJobExecution(SimpleJobRepository.java:253)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:318)
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:183)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:150)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:106)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:172)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:202)
at $Proxy19.getLastJobExecution(Unknown Source)
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:94)
at nl.triopsys.styx.sandbox.importeer.RunImport.main(RunImport.java:39)
It seems that no table is created for the job execution meta-data. How do I get Hibernate to create this automatically?
Thank you!
The quickest way is to specify the script as an INIT parameter on the H2 DB URL:
jdbc:h2:~/mydb;init=runscript from 'classpath:org/springframework/batch/core/schema-h2.sql'
Provided the spring-batch-core jar is on your classpath, it will create these tables on the first run of your test. You can then remove it once the tables are created.
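Since your dataSource is HSQLDB rather than H2, the equivalent approach (a sketch, assuming the spring-jdbc namespace is declared as xmlns:jdbc="http://www.springframework.org/schema/jdbc" on the beans element) would be to run the Spring Batch HSQLDB schema script at context startup:
<!-- Runs the Spring Batch meta-data DDL shipped in spring-batch-core against the HSQLDB dataSource -->
<jdbc:initialize-database data-source="dataSource">
    <jdbc:script location="classpath:org/springframework/batch/core/schema-hsqldb.sql"/>
</jdbc:initialize-database>
As with the H2 variant, you can remove this once the tables exist in the file-based HSQLDB database.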
