Apache Ignite client and server configuration for production - Java

I have done the below server configuration for Ignite.
<?xml version="1.0" encoding="UTF-8"?>
<!-- Apache Ignite SERVER node configuration (Spring beans XML). -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
">
<description>Main Spring file for ignite configuration.</description>
<!-- IgniteSpringBean starts an embedded Ignite node when the Spring context starts. -->
<bean id="cacheIgniteBean" class="org.apache.ignite.IgniteSpringBean">
<property name="configuration">
<bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<!-- Off-heap data regions; initial/max sizes are resolved from external property placeholders. -->
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<property name="dataRegionConfigurations">
<list>
<!-- Region backing the "item" cache. -->
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="item_cache_Region"/>
<property name="initialSize" value="${ignite.config.item.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.item.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
<!-- Region backing the "location" cache. NOTE(review): native persistence is
     toggled by a placeholder while RANDOM_LRU page eviction is also configured;
     page eviction is meant for non-persistent regions - confirm the combination
     when the persistence flag is true. -->
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="location_cache_Region"/>
<property name="initialSize" value="${ignite.config.location.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.location.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
<property name="persistenceEnabled"
value="${ignite.config.cache.location.native.persistence.enabled}"/>
</bean>
<!-- Region backing the "order" cache. -->
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="order_cache_Region"/>
<property name="initialSize" value="${ignite.config.order.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.order.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
</list>
</property>
</bean>
</property>
<!-- Cache definitions. NOTE(review): backups="0" on every cache means there is
     no redundant copy of any partition - losing one server node loses data. -->
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="item"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<!-- 3rd-party store for read-through/write-behind to the backing database. -->
<property name="cacheStoreFactory">
<bean class="javax.cache.configuration.FactoryBuilder" factory-method="factoryOf">
<constructor-arg value="com.tgt.gom.cacheserver.store.ItemCacheStore"/>
</bean>
</property>
<property name="readThrough" value="${ignite.config.cache.item.readThrough}"/>
<property name="writeThrough" value="${ignite.config.cache.item.writeThrough}"/>
<property name="writeBehindEnabled" value="${ignite.config.cache.item.writeBehindEnabled}"/>
<property name="writeBehindFlushSize"
value="${ignite.config.cache.item.writeBehindFlushSize}"/>
<property name="writeBehindFlushFrequency"
value="${ignite.config.cache.item.writeBehindFlushFrequency}"/>
<property name="writeBehindFlushThreadCount"
value="${ignite.config.cache.item.writeBehindFlushThreadCount}"/>
<property name="writeBehindBatchSize"
value="${ignite.config.cache.item.writeBehindBatchSize}"/>
<property name="dataRegionName" value="item_cache_Region"/>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="location"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="dataRegionName" value="location_cache_Region"/>
</bean>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="order"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="eagerTtl" value="true"/>
<property name="dataRegionName" value="order_cache_Region"/>
</bean>
</list>
</property>
<!-- 60s failure detection on servers (client config below uses 50s). -->
<property name="failureDetectionTimeout" value="60000"/>
<!-- Discovery via a static list of server addresses (port range 47500..47505). -->
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>127.0.0.1:47500..47505</value>
<value>10.60.158.197:47500..47505</value>
<value>10.60.158.196:47500..47505</value>
<value>10.60.158.20:47500..47505</value>
<value>10.60.158.2:47500..47505</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</beans>
And below is the client configuration.
<?xml version="1.0" encoding="UTF-8"?>
<!-- Apache Ignite CLIENT node configuration (Spring beans XML). -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
">
<description>Main Spring file for ignite configuration.</description>
<bean id="cacheIgniteBean" class="org.apache.ignite.IgniteSpringBean">
<property name="configuration">
<bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration">
<!-- Joins the cluster as a client node. -->
<property name="clientMode" value="true"/>
<!-- NOTE(review): this client duplicates the server's order_cache_Region and
     "order" cache definitions; confirm they stay in sync with the server file. -->
<property name="dataStorageConfiguration">
<bean class="org.apache.ignite.configuration.DataStorageConfiguration">
<property name="dataRegionConfigurations">
<list>
<bean class="org.apache.ignite.configuration.DataRegionConfiguration">
<property name="name" value="order_cache_Region"/>
<property name="initialSize" value="${ignite.config.order.cache.initialSize}"/>
<property name="maxSize" value="${ignite.config.order.cache.maxSize}"/>
<property name="pageEvictionMode" value="RANDOM_LRU"/>
</bean>
</list>
</property>
</bean>
</property>
<property name="cacheConfiguration">
<list>
<bean class="org.apache.ignite.configuration.CacheConfiguration">
<property name="name" value="order"/>
<property name="cacheMode" value="PARTITIONED"/>
<property name="atomicityMode" value="ATOMIC"/>
<property name="backups" value="0"/>
<property name="eagerTtl" value="true"/>
<property name="dataRegionName" value="order_cache_Region"/>
</bean>
</list>
</property>
<!-- NOTE(review): 50s here vs 60s failureDetectionTimeout on the servers -
     confirm the mismatch is intentional. -->
<property name="clientFailureDetectionTimeout" value="50000"/>
<property name="failureDetectionTimeout" value="50000"/>
<property name="discoverySpi">
<bean class="org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi">
<property name="networkTimeout" value="20000"/>
<!-- Placeholder-controlled; presumably forces the client to participate in
     discovery as a server - TODO confirm this is still required. -->
<property name="forceServerMode" value="${ignite.client.force.server.mode}"/>
<property name="ipFinder">
<bean class="org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder">
<property name="addresses">
<list>
<value>10.60.158.197:47500..47505</value>
<value>10.60.158.196:47500..47505</value>
<value>10.60.158.20:47500..47505</value>
<value>10.60.158.2:47500..47505</value>
</list>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</property>
</bean>
</beans>
I have 4 server nodes and 2 client nodes. Am I missing anything in the configuration which may cause performance issues? I am getting the below warnings sometimes on the client and server boxes.
server node warnings-:
e41fd3-846c-4086-a7bf-486616594368]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-2-#27","level":"INFO","level_value":20000}
{"#timestamp":"2018-02-08T23:36:22.177-05:00","#version":1,"message":"Accepted incoming communication connection [locAddr=/192.168.208.5:47100, rmtAddr=/192.168.208.10:58392]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-3-#28","level":"INFO","level_value":20000}
{"#timestamp":"2018-02-08T23:36:22.177-05:00","#version":1,"message":"Received incoming connection when already connected to this node, rejecting [locNode=434ef9a9-eb32-4096-8f80-843486a89137, rmtNode=65e41fd3-846c-4086-a7bf-486616594368]","logger_name":"org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi","thread_name":"grid-nio-worker-tcp-comm-3-#28","level":"INFO","level_value":20000}
client node warnings-:
{"#timestamp":"2018-02-08T23:27:30.904-05:00","#version":1,"message":"Found long running cache future [startTime=23:25:45.075, curTime=23:27:30.898, fut=GridPartitionedSingleGetFuture [topVer=AffinityTopologyVersion [topVer=10, minorTopVer=0], key=UserKeyCacheObjectImpl [part=239, val=20170627035520067938, hasValBytes=true], readThrough=true, forcePrimary=false, futId=a63a0367161-9c6368e5-8fa4-4550-abf6-02f8ef50125e, trackable=true, subjId=65e41fd3-846c-4086-a7bf-486616594368, taskName=null, deserializeBinary=true, skipVals=false, expiryPlc=CacheExpiryPolicy [], canRemap=true, needVer=false, keepCacheObjects=false, recovery=false, node=TcpDiscoveryNode [id=434ef9a9-eb32-4096-8f80-843486a89137, addrs=[127.0.0.1, 192.168.208.5], sockAddrs=[/192.168.208.5:47500, /127.0.0.1:47500], discPort=47500, order=1, intOrder=1, lastExchangeTime=1518106106491, loc=false, ver=2.3.0#20171028-sha1:8add7fd5, isClient=false]]]","logger_name":"org.apache.ignite.internal.diagnostic","thread_name":"grid-timeout-worker-#23","level":"WARN","level_value":30000}
org.apache.ignite.internal.processors.cache.CacheStoppedException: Failed to perform cache operation (cache is stopped):
Can anyone please suggest anything I am missing in the configuration for production environments?
And any idea about following metricLogs.
Metrics for local node (to disable set 'metricsLogFrequency' to 0)\n ^-- Node [id=1c4d9a3e, uptime=15:49:04.761]\n ^-- H/N/C [hosts=4, nodes=4, CPUs=16]\n ^-- CPU [cur=0.17%, avg=0.27%, GC=0%]\n ^-- PageMemory [pages=1624]\n ^-- Heap [used=510MB, free=91.55%, comm=6036MB]\n ^-- Non heap [used=117MB, free=-1%, comm=119MB]\n ^-- Public thread pool [active=0, idle=0, qSize=0]\n ^-- System thread pool [active=0, idle=6, qSize=0]\n ^-- Outbound messages queue [size=0]","logger_name":"org.apache.ignite.internal.IgniteKernal","thread_name":"grid-timeout-worker-#23","level":"INFO","level_value":20000}
Here it is saying that non-heap memory free is -1%. In the data region configuration I have given min 1 GB and max 2 GB memory. Any idea how to take care of this non-heap memory?

This looks like this issue: https://issues.apache.org/jira/browse/IGNITE-6818
It's fixed in 2.4, which will be released soon. I would recommend monitoring the Ignite dev list for further announcements. You can also build from master to check if it resolves the issue or not.

Related

Attempt to process next row failed . Closed Resultset error

Getting below error while executing a batch job. Not immediate but after half an hour.
2016-02-17 15:24:25,106 ERROR [AbstractStep ][poolTaskExecutor-2 ] Encountered an error executing step processSubscriptionFile-stepTwo in job subscriptionJob
org.springframework.jdbc.UncategorizedSQLException: Attempt to process next row failed; uncategorized SQLException for SQL [
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = 77317301 order by PERSON_ID
]; SQL state [99999]; error code [17010]; Closed Resultset: next; nested exception is java.sql.SQLException: Closed Resultset: next
at org.springframework.jdbc.support.AbstractFallbackSQLExceptionTranslator.translate(AbstractFallbackSQLExceptionTranslator.java:84)
at
My batch job file is as below. In the first step I am reading the data from a file and adding it to an intermediate table; this step uses partitions and multiple threads. In the second step I am reading the distinct customer ids from this temporary table and passing them to the writer. It is failing halfway through. There are 70000 records in the intermediate table and 10000 distinct customer ids. The second step does not use partitions or threads. Please help.
<?xml version="1.0" encoding="UTF-8"?>
<!-- Spring Batch configuration for the subscription data-migration job. -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:batch="http://www.springframework.org/schema/batch"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd"
default-autowire="byName">
<!-- Quartz job detail: launches "SubscriptionJob" through the registered
     jobLocator/jobLauncher when the scheduler fires. -->
<bean id="SubscriptionJob-jobDetail" class="org.springframework.scheduling.quartz.JobDetailBean">
<property name="jobClass" value="uk.co.batch.datamigration.JobLaunchDetails"/>
<property name="jobDataAsMap">
<map>
<entry key="jobName" value="SubscriptionJob" />
<entry key="jobLocator" value-ref="jobRegistry" />
<entry key="jobLauncher" value-ref="jobLauncher" />
</map>
</property>
</bean>
<!-- Job -->
<!-- Flow: copy the remote file locally, load it into CUST_SUB_INTER with a
     partitioned multi-threaded step, then process distinct PERSON_IDs. -->
<batch:job id="SubscriptionJob" restartable="false">
<batch:step id="copySubscriptionFile" next="processSubscriptionFile">
<!-- FIX: tasklet/partition/handler were unprefixed, which puts them in the
     default beans namespace and fails spring-batch schema validation; they
     must be in the batch: namespace. -->
<batch:tasklet ref="copySubscriptionFileLocally" />
</batch:step>
<batch:step id="processSubscriptionFile" next="processSubscriptionFile-stepTwo">
<batch:partition step="processSubscriptionFile-stepOne" partitioner="SubscriptionPartitioner">
<batch:handler grid-size="20" task-executor="SubscriptionTaskExecutor"/>
</batch:partition>
</batch:step>
<batch:step id="processSubscriptionFile-stepTwo">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepTwoReader" writer="SubscriptionItemStepTwoWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="SubscriptionJobListener"/>
<batch:listener ref="SubscriptionJobNotifier"/>
</batch:listeners>
</batch:job>
<!-- Worker step executed once per partition by the partition handler above. -->
<batch:step id="processSubscriptionFile-stepOne">
<batch:tasklet>
<batch:chunk reader="SubscriptionItemStepOneReader" writer="SubscriptionItemStepOneWriter" commit-interval="200"/>
</batch:tasklet>
</batch:step>
<!-- Sends a notification (failures only) when the job finishes. -->
<bean id ="SubscriptionJobNotifier" class="uk.co.and.batch.status.JobExecutionNotifier">
<property name="snsEventType" value="SUBSCRIPTION_JOB_NOTIFICATION"/>
<property name="onlyNotifyFailures" value="true"/>
</bean>
<!-- Splits the downloaded flat file into partitions for stepOne. -->
<bean id="SubscriptionPartitioner" class="uk.co.batch.datamigration.FlatFilePartitioner" scope="step">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
</bean>
<!-- Per-partition reader: startAt/itemsCount come from the partitioner's
     step execution context. -->
<bean id="SubscriptionItemStepOneReader" scope="step" autowire-candidate="false" parent="SubscriptionItemStepOneReaderParent">
<property name="resource" value="file:#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="startAt" value="#{stepExecutionContext['startAt']}"/>
<property name="itemsCount" value="#{stepExecutionContext['itemsCount']}"/>
</bean>
<!-- Shared reader template: '^'-delimited file mapped onto SubscriptionVO. -->
<bean id="SubscriptionItemStepOneReaderParent" abstract="true"
class="uk.co.batch.datamigration.MultiThreadedFlatFileItemReader">
<property name="linesToSkip" value="1"/>
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="delimiter" value="^"/>
<property name="names">
<list >
<value>city_name</value>
<value>country_name</value>
<value>publication_name</value>
<value>subscription_ref_code</value>
<value>subscription_state</value>
<value>unsubscribed</value>
<value>city_id</value>
<value>person_id</value>
<value>subscription_created_at</value>
<value>subscription_updated_at</value>
<value>end_value</value>
</list>
</property>
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="SubscriptionVO"/>
</bean>
</property>
</bean>
</property>
<property name="recordSeparatorPolicy">
<bean class="org.springframework.batch.item.file.separator.DefaultRecordSeparatorPolicy"/>
</property>
</bean>
<bean id="SubscriptionVO" class="uk.co.batch.datamigration.subscription.SubscriptionVO" scope="prototype"/>
<!-- TODO:Reduce number of columns to improve performance -->
<!-- Inserts each parsed row into the intermediate table. NOTE(review): the
     sequence is named SEQ_CUST_SUB_ERROR while the table is CUST_SUB_INTER -
     verify this is the intended sequence. -->
<bean id="SubscriptionItemStepOneWriter"
class="org.springframework.batch.item.database.JdbcBatchItemWriter" scope="step">
<property name="dataSource" ref="dataSource" />
<property name="sql">
<value>
<![CDATA[
insert into CUST_SUB_INTER(ID,CITY_NAME,COUNTRY_NAME,PUBLICATION_NAME,SUBSCRIPTION_REF_CODE,SUBSCRIPTION_STATE,
UNSUBSCRIBED,CITY_ID,PERSON_ID,SUBSCRIPTION_CREATED_AT,SUBSCRIPTION_UPDATED_AT,END_VALUE,JOB_ID,
MIGRATION_FILE_NAME,RECORD_INSERT_DATE)
values (SEQ_CUST_SUB_ERROR.NEXTVAL, :city_name, :country_name, :publication_name,
:subscription_ref_code, :subscription_state, :unsubscribed,
:city_id, :person_id, :subscription_created_at, :subscription_updated_at, :end_value,
#{jobExecutionContext[jobId]}, '#{jobExecutionContext[tempSubscriptionFile]}', SYSTIMESTAMP)
]]>
</value>
</property>
<property name="itemSqlParameterSourceProvider">
<bean class="org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider" />
</property>
</bean>
<!-- scope step is critical here-->
<!-- Cursor reader for stepTwo: streams distinct PERSON_IDs for this job run.
     The cursor is held open across the whole step, so the connection must not
     be reclaimed mid-step by the pool. -->
<bean id="SubscriptionItemStepTwoReader"
class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource"/>
<property name="sql">
<value>
<![CDATA[
SELECT DISTINCT PERSON_ID FROM CUST_SUB_INTER where job_id = #{jobExecutionContext.jobId} order by PERSON_ID
]]>
</value>
</property>
<property name="rowMapper">
<bean class="org.springframework.jdbc.core.SingleColumnRowMapper" p:requiredType="java.lang.Long"/>
</property>
</bean>
<bean id="SubscriptionItemStepTwoWriter" class="uk.co.batch.datamigration.subscription.SubscriptionItemStepTwoWriter" scope="step">
<property name="fileName" value="#{jobExecutionContext[tempSubscriptionFile]}" />
<property name="jobId" value="#{jobExecutionContext.jobId}" />
<property name="errorSaving" value="${Job.errorSaving}"/>
</bean>
<!-- Thread pool used by the partition handler (grid-size 20, core pool 10). -->
<bean id="SubscriptionTaskExecutor"
class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="10"/>
</bean>
<!-- Job-level listener: renames the processed remote file over SFTP. -->
<bean id="SubscriptionJobListener" class="uk.co.batch.datamigration.JobListener">
<property name="sessionFactory" ref="sftpSessionFactory"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteExtension" value="${Job.sftp.remoteExtension}"/>
<property name="renameRemoteFile" value="true"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="batchAdministratorUserId" value="${Job.batchAdministratorUserId}"/>
</bean>
<!-- Step-one tasklet: downloads the matching remote file to the local path and
     records its name in the job execution context. -->
<bean id="copySubscriptionFileLocally" class="uk.co.and.batch.core.LocalCopyTasklet" scope="prototype">
<property name="deleteRemoteFile" value="false"/>
<property name="fileNamePattern" value="${Job.subscription.filePattern}"/>
<property name="jobContextSingleFileParameterName" value="tempSubscriptionFile"/>
<property name="localPath" value="${Job.subscription.sftp.localDirectory}"/>
<property name="mustMatch" value="true"/>
<property name="remotePath" value="${Job.subscription.sftp.remoteDirectory}"/>
<property name="remoteSessionFactory" ref="sftpSessionFactory"/>
<property name="maxNumberDownload" value="1"/>
<property name="resolver">
<bean class="uk.co.and.batch.core.remote.FileNameResolvers.SFtpFileNameResolver" />
</property>
</bean>
<!-- NOTE(review): user/password are empty here - presumably injected elsewhere
     or intentionally blank; confirm before production use. -->
<bean id="sftpSessionFactory" class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory">
<property name="host" value="ftp.co.uk"/>
<property name="port" value="22"/>
<property name="user" value=""/>
<property name="password" value=""/>
<property name="proxy" value="#{ ${proxyRef} }"/>
</bean>
</beans>

hsqldb file mode with spring, db not created

I'm trying to use HSQLDB in file mode with Spring and Hibernate,
but the database is not being initialized and I'm getting the following error message:
2015-03-12 23:58:35,542 FATAL [hsqldb.db.HSQLDB4C0FFF0D08.ENGINE]: could not reopen database
org.hsqldb.HsqlException: Database lock acquisition failure: lockFile: org.hsqldb.persist.LockFile#6602b8e7[file =C:\User\db".lck, exists=false, locked=false, valid=false, ] method: openRAF reason: java.io.FileNotFoundException: C:\User\db".lck (The filename, directory name, or volume label syntax is incorrect)
My spring configuration is as following:
<?xml version="1.0" encoding="UTF-8"?>
<!-- Spring context: HSQLDB (file mode) + BoneCP pool + Hibernate 4 session factory. -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:jdbc="http://www.springframework.org/schema/jdbc"
xmlns:context="http://www.springframework.org/schema/context" xmlns:tx="http://www.springframework.org/schema/tx"
xmlns:p="http://www.springframework.org/schema/p"
xsi:schemaLocation="http://www.springframework.org/schema/jdbc http://www.springframework.org/schema/jdbc/spring-jdbc-4.1.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.1.xsd
http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-4.1.xsd">
<tx:annotation-driven />
<context:component-scan base-package="org.oss.orm" />
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location">
<value>db.properties</value>
</property>
</bean>
<!-- data source -->
<!-- NOTE(review): the jdbc.url property in db.properties ends with a stray
     double quote (jdbc:hsqldb:file:...\\db"), which is why the engine tries to
     lock 'C:\User\db".lck' and fails - remove the quote there. -->
<bean id="dataSource" class="com.jolbox.bonecp.BoneCPDataSource">
<property name="driverClass" value="${jdbc.driverClassName}" />
<property name="jdbcUrl" value="${jdbc.url}" />
<property name="username" value="${jdbc.username}" />
<property name="password" value="${jdbc.password}" />
<property name="partitionCount" value="3" />
<property name="acquireIncrement" value="10" />
<property name="maxConnectionsPerPartition" value="60" />
<property name="minConnectionsPerPartition" value="20" />
</bean>
<!-- NOTE(review): script location has no file extension - confirm the resource
     name 'scripts/create-table-if-not-exists' actually exists on the classpath. -->
<jdbc:initialize-database data-source="dataSource">
<jdbc:script location="classpath:scripts/create-table-if-not-exists" />
</jdbc:initialize-database>
<!-- Session Factory -->
<bean id="sessionFactory" name="sessionFactory"
class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
<property name="dataSource" ref="dataSource"></property>
<property name="packagesToScan" value="org.oss.beans"></property>
<property name="hibernateProperties">
<props>
<prop key="dialect">org.hibernate.dialect.HSQLDialect</prop>
<prop key="hibernate.show_sql">true</prop>
<!-- Create Tables if does not exist -->
<!-- NOTE(review): hbm2ddl 'create' rebuilds the schema on every startup,
     discarding existing data - 'update' is usually wanted for a file DB. -->
<prop key="hibernate.hbm2ddl.auto">create</prop>
</props>
</property>
<property name="annotatedClasses">
<list>
<value>org.oss.beans.Property</value>
<value>org.oss.beans.PropertyImage</value>
</list>
</property>
</bean>
<bean id="transactionManager"
class="org.springframework.orm.hibernate4.HibernateTransactionManager"
p:sessionFactory-ref="sessionFactory">
</bean>
<!-- FIX: the snippet was missing the closing root tag, making it malformed. -->
</beans>
With the separate properties file:
jdbc.driverClassName=org.hsqldb.jdbcDriver
jdbc.url=jdbc:hsqldb:file:#{systemProperties['user.home']}\\db"
jdbc.dialect=org.hibernate.dialect.HSQLDialect
jdbc.username=sa
jdbc.password=
What changes need to be made for the DB to be initialized?

JMS automatically reconnecting to TOPIC

I have two apps: one app is publishing to a topic and another app is reading from that topic. We had a scenario where a queue manager went down and then was available again. The publisher (without a restart) carries on working fine after the queue manager restarts, but the topic consumer does not receive the new messages from the publisher until it is restarted. Is there some configuration that can be set up on the topic consumer to refresh its connection somehow? We are using Java / Spring / Spring Integration over IBM MQ. The following is the configuration of our consumer.
<!-- Durable topic subscriber: DefaultMessageListenerContainer re-establishes
     the connection after broker outages at 'recoveryInterval' (30s). -->
<bean id="jmsAlertsServiceMessageListener" class="org.springframework.jms.listener.DefaultMessageListenerContainer">
<property name="connectionFactory" ref="alertConnectionFactory"/>
<property name="destination" ref="alertsServiceTopic"/>
<property name="autoStartup" value="false"/>
<!-- clientId + durableSubscriptionName identify the durable subscription. -->
<property name="clientId" value="${ps.Alert.clientId}"/>
<property name="taskExecutor" ref="jmsTaskExecutor"/>
<property name="subscriptionDurable" value="true"/>
<property name="pubSubDomain" value="true"/>
<property name="messageSelector" value="AlertState = 'RESOLVED'"/>
<property name="messageListener" ref="alertsMessageListener"/>
<property name="durableSubscriptionName" value="replay"/>
<property name="recoveryInterval" value="30000"/>
</bean>
<!-- IBM MQ connection over SSL; broker settings presumably target the WMQ
     publish/subscribe broker (brokerVersion 1) - TODO confirm versions. -->
<bean id="alertConnectionFactory" class="com.ibm.mq.jms.MQConnectionFactory" >
<property name="transportType" value="1" />
<property name="queueManager" value="${alert.mq.qm}" />
<property name="hostName" value="${alert.mq.host}" />
<property name="port" value="${alert.mq.port}" />
<property name="channel" value="${alert.mq.channel}" />
<property name="SSLCipherSuite" value="SSL_RSA_WITH_RC4_128_SHA" />
<property name="brokerPubQueue" value="${alert.mq.topic_connection_factory_broker_pub_queue}"/>
<property name="brokerQueueManager" value="${alert.mq.topic_connection_factory_broker_queue_manager}"/>
<property name="providerVersion" value="${alert.mq.topic_connection_factory_provider_version}"/>
<property name="brokerVersion" value="1"/>
<property name="messageSelection" value="1"/>
</bean>
<bean id="alertsServiceTopic" class="com.ibm.mq.jms.MQTopic">
<constructor-arg value="${alert.mq.topic}" />
<property name="brokerDurSubQueue" value="${alert.mq.queue}"/>
<property name="brokerVersion" value="1"/>
</bean>
<!-- Bridges received JMS messages onto a Spring Integration channel. -->
<bean id="alertsMessageListener" class="org.springframework.integration.jms.ChannelPublishingJmsMessageListener">
<property name="requestChannel" ref="alertsJmsInputChannel"/>
</bean>
<!-- Single-threaded executor for the listener container. -->
<bean id="jmsTaskExecutor" class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="1"/>
<property name="maxPoolSize" value="1"/>
<property name="waitForTasksToCompleteOnShutdown" value="true"/>
<property name="daemon" value="false"/>
<property name="threadNamePrefix" value="jmsInbound-"/>
<property name="queueCapacity" value="3"/>
<!-- Discard any task that gets rejected. -->
<property name="rejectedExecutionHandler">
<bean class="java.util.concurrent.ThreadPoolExecutor$DiscardPolicy"/>
</property>
</bean>
The DefaultMessageListenerContainer will automatically reconnect according to the recoveryInterval.
Turn on DEBUG (or even TRACE) logging to figure out what's happening.

How do I change my XML to have hibernate to define the DataSource

can someone please show me how to change my XML to have hibernate to define the DataSource. I would like to see a good sample of it. Below is my xml file. can someone post what it would look like with hibernate defining the DataSource:
<?xml version="1.0" encoding="UTF-8"?>
<!-- Spring context: C3P0-pooled DataSource + Hibernate 3 session factory. -->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xmlns:tx="http://www.springframework.org/schema/tx" xmlns:jdbc="http://www.springframework.org/schema/jdbc"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-3.0.xsd
http://www.springframework.org/schema/jdbc
http://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd">
<context:property-placeholder location="classpath:jdbc.properties" />
<context:component-scan base-package="org.uftwf" />
<tx:annotation-driven transaction-manager="hibernateTransactionManager" />
<!-- Spring-managed C3P0 pool; values come from jdbc.properties. -->
<bean id="dataSource" class="com.mchange.v2.c3p0.ComboPooledDataSource"
destroy-method="close">
<!-- these are C3P0 properties -->
<property name="acquireIncrement" value="${database.acquireIncrement}" />
<property name="minPoolSize" value="${database.minPoolSize}" />
<property name="maxPoolSize" value="${database.maxPoolSize}" />
<property name="maxIdleTime" value="${database.maxIdleTime}" />
<property name="idleConnectionTestPeriod" value="300" />
<property name="driverClass" value="${database.driver}" />
<property name="jdbcUrl" value="${database.url}" />
<property name="user" value="${database.user}" />
<property name="password" value="${database.password}" />
</bean>
<bean id="sessionFactory"
class="org.springframework.orm.hibernate3.annotation.AnnotationSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="annotatedClasses">
<list>
<value>org.uftwf.schoolvisit.model.VisitModel</value>
<value>org.uftwf.schoolvisit.model.NameID_lookupModel</value>
<value>org.uftwf.schoolvisit.model.School_lookupModel</value>
</list>
</property>
<property name="hibernateProperties">
<props>
<prop key="hibernate.dialect">${hibernate.dialect}</prop>
<prop key="hibernate.show_sql">${hibernate.show_sql}</prop>
<prop key="hibernate.use_sql_comments">${hibernate.use_sql_comments}</prop>
<prop key="format_sql">${format_sql}</prop>
<!-- NOTE(review): these hibernate.c3p0.* settings configure Hibernate's OWN
     pool and are ignored when a Spring-managed dataSource (above) is injected;
     keeping both is the mixing the reviewer's comment warns about. -->
<prop key="hibernate.c3p0.min_size">10</prop>
<prop key="hibernate.c3p0.max_size">25</prop>
<prop key="hibernate.c3p0.timeout">600</prop>
<prop key="hibernate.c3p0.max_statements">0</prop>
<prop key="hibernate.c3p0.idle_test_period">300</prop>
<prop key="hibernate.c3p0.acquire_increment">5</prop>
</props>
</property>
</bean>
<bean id="hibernateTransactionManager"
class="org.springframework.orm.hibernate3.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
</beans>
This is a comment that someone made to me about my XML so I am trying to see what he is talking about:
You should simplify your own life by using those hibernate. properties to define the DataSource in order to avoid a maintenance nightmare down the road by mixing them. It will help to avoid confusion like this. And don't hardcode the values that you have data properties for.*
Should be like this:
src/main/resources/META-INF/persistence.xml:
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- JPA persistence unit backed by Hibernate (RESOURCE_LOCAL transactions). -->
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="2.0" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
<persistence-unit name="persistenceUnit" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.ejb.HibernatePersistence</provider>
<properties>
<property name="hibernate.dialect" value="org.hibernate.dialect.MySQL5InnoDBDialect"/>
<!-- value="create" to build a new database on each run; value="update" to modify an existing database; value="create-drop" means the same as "create" but also drops tables when Hibernate closes; value="validate" makes no changes to the database -->
<property name="hibernate.hbm2ddl.auto" value="update"/>
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
<property name="hibernate.connection.charSet" value="UTF-8"/>
<property name="show_sql" value="true"/>
<!-- Uncomment the following two properties for JBoss only -->
<!-- property name="hibernate.validator.apply_to_ddl" value="false" /-->
<!-- property name="hibernate.validator.autoregister_listeners" value="false" /-->
</properties>
</persistence-unit>
</persistence>
src/main/resources/META-INF/spring/applicationContext.xml:
<!-- JPA transaction manager wired to the entity manager factory below. -->
<bean class="org.springframework.orm.jpa.JpaTransactionManager" id="transactionManager">
<property name="entityManagerFactory" ref="entityManagerFactory"/>
</bean>
<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>
<!-- Builds the EntityManagerFactory from META-INF/persistence.xml, overriding
     its connection settings with the Spring-managed dataSource. -->
<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
<property name="persistenceUnitName" value="persistenceUnit"/>
<property name="dataSource" ref="dataSource"/>
</bean>
<!-- Commons-DBCP pool; connection details come from property placeholders. -->
<bean class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close" id="dataSource">
<property name="driverClassName" value="${database.driverClassName}"/>
<property name="url" value="${database.url}"/>
<property name="username" value="${database.username}"/>
<property name="password" value="${database.password}"/>
<property name="testOnBorrow" value="true"/>
<property name="testOnReturn" value="true"/>
<property name="testWhileIdle" value="true"/>
<property name="timeBetweenEvictionRunsMillis" value="1800000"/>
<property name="numTestsPerEvictionRun" value="3"/>
<property name="minEvictableIdleTimeMillis" value="1800000"/>
<property name="validationQuery" value="SELECT 1"/>
<property name="initialSize" value="1"/>
<property name="minIdle" value="1"/>
<property name="maxActive" value="10"/>
<property name="poolPreparedStatements" value="true"/>
<property name="maxOpenPreparedStatements" value="20"/>
</bean>

Spring 2.5 login not responding even with the correct username/password

I developed a login mechanism for one of my projects with Spring 2.5, Acegi Security 1.0.7, and I used Tomcat 6 as my development server. When I was developing the project everything worked fine and I could successfully log in. The problem began when I deployed my application on the production server. From the moment I deployed the application on the production Tomcat 6, I could not log in even with the correct username and password, and the weirdest part of all is that no exception is thrown. I just can't log in!
here is the application-context.xml of the application:
<!-- Acegi 1.0.7 security wiring. The bean id 'authedicationProvider' is a typo
     but is used consistently (see the reference in authenticationManager), so
     it is left as-is. -->
<bean id="authedicationProvider" class="org.acegisecurity.providers.dao.DaoAuthenticationProvider">
<property name="userDetailsService" ref="userDetailService"/>
</bean>
<bean id="authenticationEntryPoint" class="org.acegisecurity.ui.webapp.AuthenticationProcessingFilterEntryPoint">
<property name="loginFormUrl" value="/login.htm" />
</bean>
<!-- NOTE(review): the chain lists only the processing and exception-translation
     filters; filterSecurityInterceptor (defined below) is never referenced here,
     so the ROLE_ADMIN URL rules may never be enforced - confirm whether it is
     wired elsewhere. 'exceptionTranslationFilter' is also not defined in this
     snippet. -->
<bean id="filterChainProxy"
class="org.acegisecurity.util.FilterChainProxy">
<property name="filterInvocationDefinitionSource">
<value>
CONVERT_URL_TO_LOWERCASE_BEFORE_COMPARISON
PATTERN_TYPE_APACHE_ANT
/**=authenticationProcessingFilter,exceptionTranslationFilter
</value>
</property>
</bean>
<bean id="authenticationProcessingFilter"
class="org.acegisecurity.ui.webapp.AuthenticationProcessingFilter">
<property name="authenticationManager" ref="authenticationManager"/>
<property name="authenticationFailureUrl" value="/error.htm" />
<property name="defaultTargetUrl" value="/admin_menu.htm" />
<property name="filterProcessesUrl" value="/j_acegi_security_check" />
</bean>
<bean id="roleVoter" class="org.acegisecurity.vote.RoleVoter"/>
<bean id="accessDecisionManager" class="org.acegisecurity.vote.UnanimousBased">
<property name="decisionVoters">
<list>
<ref bean="roleVoter"/>
</list>
</property>
<property name="allowIfAllAbstainDecisions" value="true"/>
</bean>
<!-- FIX: closing tags were mis-nested/overlapping as posted
     (</value></bean></property></bean>); corrected to close property then bean. -->
<bean id="filterSecurityInterceptor" class="org.acegisecurity.intercept.web.FilterSecurityInterceptor">
<property name="authenticationManager" ref="authenticationManager"/>
<property name="accessDecisionManager" ref="accessDecisionManager"/>
<property name="objectDefinitionSource">
<value>
CONVERT_URL_TO_LOWERCASE_BEFORE_COMPARISON
PATTERN_TYPE_APACHE_ANT
/add_article.htm=ROLE_ADMIN
/add_publication.htm=ROLE_ADMIN
/admin_menu.htm=ROLE_ADMIN
</value>
</property>
</bean>
<bean id="authenticationManager" class="org.acegisecurity.providers.ProviderManager">
<property name="providers">
<list>
<ref bean="authedicationProvider"/>
</list>
</property>
</bean>
<!-- JDBC-backed user lookup; enabled flag is hard-coded to 'true'. -->
<bean id="userDetailService" class="org.acegisecurity.userdetails.jdbc.JdbcDaoImpl">
<property name="dataSource" ref="datasource"/>
<property name="usersByUsernameQuery">
<value>
SELECT username,password,'true' AS enabled FROM Users where username=?
</value>
</property>
<property name="authoritiesByUsernameQuery">
<value>
SELECT username,role_name FROM Roles r,Users u WHERE r.user=u.id AND u.username=?
</value>
</property>
</bean>
Am I missing something? Any help would be really appreciated! Thank you in advance.
Is this what you have <bean id="authe**d**icationProvider" in your first line of application file ?

Categories

Resources