Error in hibernate while calling stored procedure - java

I am calling a stored procedure from Hibernate, but it gives me an error. I executed the stored procedure in MySQL separately and it returned the expected result. This is the error I got from Hibernate:
org.springframework.orm.hibernate3.HibernateSystemException: No Dialect mapping for JDBC type: -1; nested exception is org.hibernate.MappingException: No Dialect mapping for JDBC type: -1
at org.springframework.orm.hibernate3.SessionFactoryUtils.convertHibernateAccessException(SessionFactoryUtils.java:676)
at org.springframework.orm.hibernate3.HibernateAccessor.convertHibernateAccessException(HibernateAccessor.java:412)
at org.springframework.orm.hibernate3.HibernateTemplate.doExecute(HibernateTemplate.java:424)
at org.springframework.orm.hibernate3.HibernateTemplate.executeWithNativeSession(HibernateTemplate.java:374)
at org.springframework.orm.hibernate3.HibernateTemplate.findByNamedQuery(HibernateTemplate.java:988)
at com.iris.allofactor.data.dao.hibernate.HibernatePatientDao.getDetailedPatientList(HibernatePatientDao.java:2352)
at com.iris.allofactor.data.dao.test.PatientDaoTest.testgetPatientDetails(PatientDaoTest.java:578)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at junit.framework.TestCase.runTest(TestCase.java:154)
at junit.framework.TestCase.runBare(TestCase.java:127)
at junit.framework.TestResult$1.protect(TestResult.java:106)
at junit.framework.TestResult.runProtected(TestResult.java:124)
at junit.framework.TestResult.run(TestResult.java:109)
at junit.framework.TestCase.run(TestCase.java:118)
at junit.framework.TestSuite.runTest(TestSuite.java:208)
at junit.framework.TestSuite.run(TestSuite.java:203)
at org.eclipse.jdt.internal.junit.runner.junit3.JUnit3TestReference.run(JUnit3TestReference.java:130)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:467)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197)
Caused by: org.hibernate.MappingException: No Dialect mapping for JDBC type: -1
at org.hibernate.dialect.TypeNames.get(TypeNames.java:56)
at org.hibernate.dialect.TypeNames.get(TypeNames.java:81)
at org.hibernate.dialect.Dialect.getHibernateTypeName(Dialect.java:192)
at org.hibernate.loader.custom.CustomLoader.getHibernateType(CustomLoader.java:161)
at org.hibernate.loader.custom.CustomLoader.autoDiscoverTypes(CustomLoader.java:131)
at org.hibernate.loader.Loader.getResultSet(Loader.java:1678)
at org.hibernate.loader.Loader.doQuery(Loader.java:662)
at org.hibernate.loader.Loader.doQueryAndInitializeNonLazyCollections(Loader.java:224)
at org.hibernate.loader.Loader.doList(Loader.java:2145)
at org.hibernate.loader.Loader.listIgnoreQueryCache(Loader.java:2029)
at org.hibernate.loader.Loader.list(Loader.java:2024)
at org.hibernate.loader.custom.CustomLoader.list(CustomLoader.java:111)
at org.hibernate.impl.SessionImpl.listCustomQuery(SessionImpl.java:1655)
at org.hibernate.impl.AbstractSessionImpl.list(AbstractSessionImpl.java:142)
at org.hibernate.impl.SQLQueryImpl.list(SQLQueryImpl.java:164)
at org.springframework.orm.hibernate3.HibernateTemplate$33.doInHibernate(HibernateTemplate.java:997)
at org.springframework.orm.hibernate3.HibernateTemplate.doExecute(HibernateTemplate.java:419)
... 22 more
This is the stored procedure I am trying to call:
PROCEDURE DETAILED_PATIENT_LIST(_clinicId INT,_offset INT,_count INT)
BEGIN
SET @a = _offset;
SET @b = _count;
SET @c = _clinicId;
DROP TEMPORARY TABLE IF EXISTS MAX_VISIT;
DROP TEMPORARY TABLE IF EXISTS TOTAL_VISIT;
CREATE TEMPORARY TABLE TOTAL_VISIT (
KEY TOTAL_KEY(CLINIC_ID) )
SELECT VISIT_DATE AS VISIT_DATE,PV.PATIENT_ID,PV.VISIT_ID,PV.CLINIC_ID,VD.ICD_CODE_ONE,VD.ICD_CODE_TWO,
VD.ICD_CODE_THREE,VD.ICD_CODE_FOUR,GROUP_CONCAT(VP.PROCEDURE_CODE SEPARATOR ',') PROCEDURE_CODE FROM
PATIENT_VISIT PV
JOIN VISIT_PROCEDURE VP ON VP.CLINIC_ID = PV.CLINIC_ID AND VP.VISIT_ID = PV.VISIT_ID
JOIN VISIT_DIAGNOSIS VD ON VD.CLINIC_ID = PV.CLINIC_ID AND VD.VISIT_ID = PV.VISIT_ID
WHERE PV.CLINIC_ID = _clinicId AND VD.ICD_ID_ONE <> 0 GROUP BY PV.VISIT_ID
ORDER BY PATIENT_ID,VISIT_DATE DESC;
CREATE TEMPORARY TABLE MAX_VISIT (
KEY `MAX_KEY` (`CLINIC_ID`,`PATIENT_ID`) )
SELECT MAX(VISIT_DATE) AS VISIT_DATE,PATIENT_ID,VISIT_ID,CLINIC_ID,ICD_CODE_ONE,ICD_CODE_TWO,
ICD_CODE_THREE,ICD_CODE_FOUR,PROCEDURE_CODE FROM TOTAL_VISIT WHERE CLINIC_ID = _clinicId GROUP BY PATIENT_ID;
PREPARE STMT FROM "SELECT P.PATIENT_ID,P.CLINIC_ID,P.MRN,P.FIRST_NAME,P.LAST_NAME,P.MIDDLE_NAME,P.SSN,P.DOB,P.SEX,P.ADDRESS_LINE1,
P.ADDRESS_LINE2,P.CITY,P.STATE,P.COUNTRY,P.PHONE,P.ZIP,P.FAX,P.EMAIL,P.BILLING_METHOD,P.ALT_PHONE,
P.MARITAL_STATUS,P.ACTIVE,P.INACTIVE_REASON,
I1.POLICY_NO AS PRIMARY_POLICY_NO,I1.GROUP_NO AS PRIMARY_GROUP_NO,I1.EDOC_END AS I1EDOC_END,
I2.POLICY_NO AS SECONDERY_POLICY_NO,I2.GROUP_NO AS SECONDERY_GROUP_NO,I2.EDOC_END AS I2EDOC_END,
I3.POLICY_NO AS TERTIERY_POLICY_NO,I3.GROUP_NO AS TERTIERY_GROUP_NO,I3.EDOC_END AS I3EDOC_END,
B1.NAME AS PRIMARYINS,B1.ADDRESS_LINE1 AS PRIMARY_ADD1,B1.ADDRESS_LINE2 AS PRIMARY_ADD2,B1.CITY AS PRIMARY_CITY,B1.STATE AS PRIMARY_STATE,
B1.COUNTRY AS PRIMARY_COUNTRY,B1.PHONE AS PRIMARY_PHONE,B1.ZIP AS PRIMARY_ZIP,B1.EMAIL AS PRIMARY_EMAIL,
B2.NAME AS SECONDERY,B2.ADDRESS_LINE1 AS SECONDERY_ADD1,B2.ADDRESS_LINE2 AS SECONDERY_ADD2,B2.CITY AS SECONDERY_CITY,B2.STATE AS SECONDERY_STATE,
B2.COUNTRY AS SECONDERY_COUNTRY,B2.PHONE AS SECONDERY_PHONE,B2.ZIP AS SECONDERY_ZIP,B2.EMAIL AS SECONDERY_EMAIL,
B3.NAME AS TERTIERY,B3.ADDRESS_LINE1 AS TERTIERY_ADD1,B3.ADDRESS_LINE2 AS TERTIERY_ADD2,B3.CITY AS TERTIERY_CITY,B3.STATE AS TERTIERY_STATE,
B3.COUNTRY AS TERTIERY_COUNTRY,B3.PHONE AS TERTIERY_PHONE,B3.ZIP AS TERTIERY_ZIP,B3.EMAIL AS TERTIERY_EMAIL,
CONCAT(PH1.LAST_NAME, ', ' , PH1.FIRST_NAME , ' ' , PH1.MIDDLE_NAME) AS POLICYHOLDER1,PH1.RELATIONSHIP AS RELATIONSHIP1,
PH1.ADDRESS_LINE1 AS PH1ADDRESS_LINE1,PH1.ADDRESS_LINE2 AS PH1ADDRESS_LINE2,PH1.CITY AS PH1CITY,PH1.STATE AS PH1STATE,PH1.COUNTRY AS PH1COUNTRY,
CONCAT(PH2.LAST_NAME, ', ' , PH2.FIRST_NAME , ' ' , PH2.MIDDLE_NAME) AS POLICYHOLDER2,PH2.RELATIONSHIP AS RELATIONSHIP2,
PH2.ADDRESS_LINE1 AS PH2ADDRESS_LINE1,PH2.ADDRESS_LINE2 AS PH2ADDRESS_LINE2,PH2.CITY AS PH2CITY,PH2.STATE AS PH2STATE,PH2.COUNTRY AS PH2COUNTRY,
CONCAT(PH3.LAST_NAME, ', ' , PH3.FIRST_NAME , ' ' , PH3.MIDDLE_NAME) AS POLICYHOLDER3,PH3.RELATIONSHIP AS RELATIONSHIP3,
PH3.ADDRESS_LINE1 AS PH3ADDRESS_LINE1,PH3.ADDRESS_LINE2 AS PH3ADDRESS_LINE2,PH3.CITY AS PH3CITY,PH3.STATE AS PH3STATE,PH3.COUNTRY AS PH3COUNTRY,
VISIT_DATE AS LASTVISIT,P.NET_BALANCE,ICD_CODE_ONE,ICD_CODE_TWO,ICD_CODE_THREE,ICD_CODE_FOUR,PROCEDURE_CODE
FROM PATIENT P
LEFT JOIN A I1 ON (I1.CLINIC_ID = P.CLINIC_ID AND I1.PATIENT_ID = P.PATIENT_ID AND I1.ACTIVE = TRUE AND I1.RANKING = 1)
LEFT JOIN B I2 ON (I2.CLINIC_ID = P.CLINIC_ID AND I2.PATIENT_ID = P.PATIENT_ID AND I2.ACTIVE = TRUE AND I2.RANKING = 2)
LEFT JOIN C I3 ON (I3.CLINIC_ID = P.CLINIC_ID AND I3.PATIENT_ID = P.PATIENT_ID AND I3.ACTIVE = TRUE AND I3.RANKING = 3)
LEFT JOIN D B1 ON (B1.CLINIC_ID = I1.CLINIC_ID AND B1.BUSINESS_ID = I1.COMPANY_ID AND I1.RANKING = 1 AND B1.ACTIVE = TRUE)
LEFT JOIN D B2 ON (B2.CLINIC_ID = I2.CLINIC_ID AND B2.BUSINESS_ID = I2.COMPANY_ID AND I2.RANKING = 2 AND B2.ACTIVE = TRUE)
LEFT JOIN D B3 ON (B3.CLINIC_ID = I3.CLINIC_ID AND B3.BUSINESS_ID = I3.COMPANY_ID AND I3.RANKING = 3 AND B3.ACTIVE = TRUE)
LEFT JOIN E PH1 ON (PH1.CLINIC_ID = I1.CLINIC_ID AND PH1.POLICY_HOLDER_ID = I1.POLICY_HOLDER_ID)
LEFT JOIN E PH2 ON (PH2.CLINIC_ID = I2.CLINIC_ID AND PH2.POLICY_HOLDER_ID = I2.POLICY_HOLDER_ID)
LEFT JOIN E PH3 ON (PH3.CLINIC_ID = I3.CLINIC_ID AND PH3.POLICY_HOLDER_ID = I3.POLICY_HOLDER_ID)
LEFT JOIN MAX_VISIT ON (MAX_VISIT.CLINIC_ID = P.CLINIC_ID AND MAX_VISIT.PATIENT_ID = P.PATIENT_ID)
WHERE P.CLINIC_ID = ? AND P.MARK_AS_DELETE = 0 LIMIT ?,?";
EXECUTE STMT USING @c,@a,@b;
END
I am not sure why this error occurs, since I get the result when calling the stored procedure independently. Does anyone have a solution for this? Thanks in advance.
This is the Hibernate configuration:
hibernate.dialect=org.hibernate.dialect.MySQLDialect
hibernate.show_sql=true
#hibernate.query_factory_class=org.hibernate.hql.classic.ClassicQueryTranslatorFactory
hibernate.query_factory_class=org.hibernate.hql.ast.ASTQueryTranslatorFactory
<property name="hibernateProperties">
<props>
<prop key="hibernate.dialect">${hibernate.dialect}</prop>
<prop key="hibernate.show_sql">${hibernate.show_sql}</prop>
<prop key="hibernate.query.factory_class">${hibernate.query_factory_class}</prop>
<prop key="hibernate.generate_statistics">true</prop>
</props>
</property>
<bean id="transactionManager" class="org.springframework.orm.hibernate3.HibernateTransactionManager">
<property name="sessionFactory"><ref bean="sessionFactory"/></property>
</bean>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<sql-query name="getDetailedPatientListReport" callable="true" >
{ call DETAILED_PATIENT_LIST (?,?,?) }
</sql-query>
</hibernate-mapping>
And this is the calling code:
getHibernateTemplate().findByNamedQuery("getDetailedPatientListReport",
        new Object[] { iClinicId, start, range });
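For anyone hitting the same error: JDBC type -1 is java.sql.Types.LONGVARCHAR, which the MySQL driver reports for the LONGTEXT column produced by GROUP_CONCAT, and which the stock MySQLDialect of this Hibernate 3 generation has no mapping for. A common workaround is a custom dialect that registers the missing type; a minimal sketch, assuming Hibernate 3.x (the class name is hypothetical):

import java.sql.Types;

import org.hibernate.Hibernate;
import org.hibernate.dialect.MySQLDialect;

// Hypothetical dialect: maps JDBC LONGVARCHAR (-1) to Hibernate's "text" type
// so column type auto-discovery works for GROUP_CONCAT results.
public class MySQLLongVarcharDialect extends MySQLDialect {
    public MySQLLongVarcharDialect() {
        super();
        registerHibernateType(Types.LONGVARCHAR, Hibernate.TEXT.getName());
    }
}

Point hibernate.dialect at this class instead of org.hibernate.dialect.MySQLDialect. Alternatively, declaring an explicit <return-scalar> for each column in the <sql-query> mapping sidesteps type auto-discovery entirely, at the cost of listing every column.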

Related

InvalidRequestException(why:Unknown identifier IdPiece)

When I try to save a record to Cassandra using the persist() method and the Kundera framework, I receive this error:
28462 [Thread-15-localhostAMQPbolt0-executor[2 2]] INFO d.d.pieceDAOImpl - start to insert data
28513 [Thread-15-localhostAMQPbolt0-executor[2 2]] INFO c.i.c.c.CassandraClientBase - Returning cql query INSERT INTO "pieces"("width","height","depth","IdPiece") VALUES(10.0,11.0,12.0,'1') .
28543 [Thread-15-localhostAMQPbolt0-executor[2 2]] ERROR c.i.c.c.CassandraClientBase - Error while executing query INSERT INTO "pieces"("width","height","depth","IdPiece") VALUES(10.0,11.0,12.0,'1')
28544 [Thread-15-localhostAMQPbolt0-executor[2 2]] ERROR o.a.s.util - Async loop died!
java.lang.RuntimeException: com.impetus.kundera.KunderaException: com.impetus.kundera.KunderaException: InvalidRequestException(why:Unknown identifier IdPiece)
at org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:448) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:414) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.daemon.executor$fn__8226$fn__8239$fn__8292.invoke(executor.clj:851) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.util$async_loop$fn__554.invoke(util.clj:484) [storm-core-1.0.0.jar:1.0.0]
at clojure.lang.AFn.run(AFn.java:22) [clojure-1.7.0.jar:?]
at java.lang.Thread.run(Thread.java:745) [?:1.7.0_99]
Caused by: com.impetus.kundera.KunderaException: com.impetus.kundera.KunderaException: InvalidRequestException(why:Unknown identifier IdPiece)
at com.impetus.kundera.persistence.EntityManagerImpl.persist(EntityManagerImpl.java:180) ~[project-0.0.1-SNAPSHOT-jar-with-dependencies.jar:?]
at database.dao.pieceDAOImpl.insert(pieceDAOImpl.java:54) ~[project-0.0.1-SNAPSHOT-jar-with-dependencies.jar:?]
at database.controller.DatabaseController.saveSensorEntitie(DatabaseController.java:47) ~[project-0.0.1-SNAPSHOT-jar-with-dependencies.jar:?]
at connector.bolt.PrinterBolt.execute(PrinterBolt.java:66) ~[project-0.0.1-SNAPSHOT-jar-with-dependencies.jar:?]
at org.apache.storm.daemon.executor$fn__8226$tuple_action_fn__8228.invoke(executor.clj:731) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.daemon.executor$mk_task_receiver$fn__8147.invoke(executor.clj:463) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.disruptor$clojure_handler$reify__7663.onEvent(disruptor.clj:40) ~[storm-core-1.0.0.jar:1.0.0]
at org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:435) ~[storm-core-1.0.0.jar:1.0.0]
... 6 more
And I am sure that idpiece is the primary key of my table.
My table:
CREATE TABLE mykeyspace.pieces (
idpiece text PRIMARY KEY,
depth double,
height double,
width double
) WITH bloom_filter_fp_chance = 0.01
AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}'
AND comment = ''
AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'}
AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'}
AND dclocal_read_repair_chance = 0.1
AND default_time_to_live = 0
AND gc_grace_seconds = 864000
AND max_index_interval = 2048
AND memtable_flush_period_in_ms = 0
AND min_index_interval = 128
AND read_repair_chance = 0.0
AND speculative_retry = '99.0PERCENTILE';
My entity class:
@Entity
@Table(name = "pieces", schema = "mykeyspace@cassandra_pu")
public class PieceEntitie implements Serializable {
@Id
private String IdPiece;
@Column
private double width;
@Column
private double height;
@Column
private double depth;
How can I resolve this problem? Thank you in advance.
In Cassandra, quoted identifiers are case-sensitive, so the column name "IdPiece" is different from the actual column idpiece in your table pieces.
Kundera uses quoted identifiers in the generated query:
INSERT INTO "pieces"("width","height","depth","IdPiece") VALUES(10.0,11.0,12.0,'1')
Since quoted identifiers are case-sensitive, there is no column "IdPiece" in your table.
The solution is to rename the field in your entity class to idpiece, as sketched below.
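A minimal sketch of the renamed field, assuming the rest of the class stays as posted:

@Entity
@Table(name = "pieces", schema = "mykeyspace@cassandra_pu")
public class PieceEntitie implements Serializable {

    // Lower-case field name, so the quoted identifier Kundera generates
    // ("idpiece") matches the actual column name in Cassandra.
    @Id
    private String idpiece;

    @Column
    private double width;

    @Column
    private double height;

    @Column
    private double depth;
}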

Solr Custom Transformer Not Working?

I am trying to add a few fields to the Solr index while indexing with the Data Import Handler.
Below is my data-config.xml:
<dataConfig>
<dataSource type="JdbcDataSource" driver="com.mysql.jdbc.Driver"
url="jdbc:mysql://localhost:3306/db" user="*****" password="*********"/>
<script><![CDATA[
function addMergedPdt(row)
{
var m = row.get('mergedPdt');
if(m == null)
{
row.put('mergedPdt',9999999999);
}
return row;
}
]]></script>
<script><![CDATA[
function transform(row)
{
if(row.get(mergedPdt) == null)
{
row.put('catStock', 0);
row.put('catPxMrp',0);
row.put('catPrice',0);
row.put('catCount',1)
row.put('catRating',0);
row.put('catAval',0);
return row;
}
else
{
row.put('catAval',1);
return row;
}
}
]]></script>
<document>
<entity name="product" onError="continue" transformer="script:addMergedPdt" query="select p.id, name, image, stock, lower(p.seller) as seller, brand,
cast(price as signed) as price, cast(pxMrp as signed) as pxMrp, mergedPdt, shipDays, url, cast(rating as signed) as rating,
disc(price, pxMrp) as discount, mc.node as seller_cat, oc.node as cat, substring_index(oc.node, '|', 1) as cat1,
substring(substring_index(oc.node, '|', 2), length(substring_index(oc.node, '|', 1)) + 2) as cat2,
substring(substring_index(oc.node, '|', 3), length(substring_index(oc.node, '|', 2)) + 2) as cat3
from _products as p, _mergedCat as mc, _ourCat as oc where active = 1 and cat_id = mc.id and ourCat = oc.id and
('${dataimporter.request.full}' != 'false' OR last_visited > '${dataimporter.last_index_time}') limit 10000">
<!-- To populate the catalog data -->
<entity name="mergedPdt" transformer="script:transform" onError="continue" query = "SELECT mergedPdt,count(*) as catCount,cast(max(stock) as signed) as catStock,cast(max(pxMrp) as signed) as catPxMrp,cast(min(price) as signed) as catPrice,cast(avg(rating) as signed) as catRating FROM `_products` where mergedPdt = ${product.mergedPdt}"/>
</entity>
</document>
</dataConfig>
I am getting an error like this:
org.apache.solr.handler.dataimport.DataImportHandlerException: Error invoking script for entity mergedPdt Processing Document # 10000
at org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow(DataImportHandlerException.java:70)
at org.apache.solr.handler.dataimport.ScriptTransformer.transformRow(ScriptTransformer.java:59)
at org.apache.solr.handler.dataimport.EntityProcessorWrapper.applyTransformer(EntityProcessorWrapper.java:198)
at org.apache.solr.handler.dataimport.EntityProcessorWrapper.nextRow(EntityProcessorWrapper.java:256)
at org.apache.solr.handler.dataimport.DocBuilder.buildDocument(DocBuilder.java:475)
at org.apache.solr.handler.dataimport.DocBuilder.buildDocument(DocBuilder.java:514)
at org.apache.solr.handler.dataimport.DocBuilder.buildDocument(DocBuilder.java:414)
at org.apache.solr.handler.dataimport.DocBuilder.doFullDump(DocBuilder.java:329)
at org.apache.solr.handler.dataimport.DocBuilder.execute(DocBuilder.java:232)
at org.apache.solr.handler.dataimport.DataImporter.doFullImport(DataImporter.java:416)
at org.apache.solr.handler.dataimport.DataImporter.runCmd(DataImporter.java:480)
at org.apache.solr.handler.dataimport.DataImporter$1.run(DataImporter.java:461)
Caused by: java.lang.NoSuchMethodException: no such method: transform
at com.sun.script.javascript.RhinoScriptEngine.invoke(RhinoScriptEngine.java:286)
at com.sun.script.javascript.RhinoScriptEngine.invokeFunction(RhinoScriptEngine.java:258)
at org.apache.solr.handler.dataimport.ScriptTransformer.transformRow(ScriptTransformer.java:55)
... 10 more
All fields are being indexed except the extra ones I tried to add with the transformer.
Surprisingly, only one of those extra fields, "catCount", has been indexed.
You can trust me that the schema definition and the other configuration are correct.
Any lead will be highly appreciated.
Thanks in advance :)
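For anyone debugging the same NoSuchMethodException: the Data Import Handler configuration appears to honor only a single <script> element, so splitting addMergedPdt and transform across two blocks means one of them is silently dropped. A sketch of the two functions merged into one block (same bodies, plus quoting the column name in row.get, which looks like a bug in the original):

<script><![CDATA[
function addMergedPdt(row)
{
var m = row.get('mergedPdt');
if(m == null)
{
row.put('mergedPdt', 9999999999);
}
return row;
}
function transform(row)
{
// Note the quotes: row.get(mergedPdt) in the original referenced an
// undefined JavaScript variable instead of the column name string.
if(row.get('mergedPdt') == null)
{
row.put('catStock', 0);
row.put('catPxMrp', 0);
row.put('catPrice', 0);
row.put('catCount', 1);
row.put('catRating', 0);
row.put('catAval', 0);
}
else
{
row.put('catAval', 1);
}
return row;
}
]]></script>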

Postgresql infinite loop after a hibernate criteria list

I have been stuck on a database problem for several days now. The application hangs after a specific Hibernate criteria.list() call, exactly at the following stack trace:
java.net.SocketInputStream.read(byte[], int, int)
org.postgresql.core.VisibleBufferedInputStream.readMore(int)
org.postgresql.core.VisibleBufferedInputStream.ensureBytes(int)
org.postgresql.core.VisibleBufferedInputStream.read()
org.postgresql.core.PGStream.ReceiveChar()
org.postgresql.core.v3.QueryExecutorImpl.processResults(ResultHandler, int)
org.postgresql.core.v3.QueryExecutorImpl.execute(Query, ParameterList, ResultHandler, int, int, int)
org.postgresql.jdbc2.AbstractJdbc2Statement.execute(Query, ParameterList, int)
org.postgresql.jdbc2.AbstractJdbc2Statement.executeWithFlags(int)
org.postgresql.jdbc2.AbstractJdbc2Statement.executeQuery()
org.hibernate.internal.CriteriaImpl.list()
After some research and testing I found that the problem is not a blocked query, but a query that runs forever.
It is a Java Spring application with the following sessionFactory and transaction manager configuration:
<bean id="dataSource" class="org.apache.commons.dbcp2.BasicDataSource" destroy-method="close">
<property name="driverClassName" value="org.postgresql.Driver" />
<property name="url" value="jdbc:postgresql://localhost:5432/database" />
<property name="username" value="username" />
<property name="password" value="password" />
</bean>
<bean id="sessionFactory"
class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="packagesToScan" value="com.myapp.domain" />
<property name="configLocation" value="/WEB-INF/hibernate.cfg.xml" />
</bean>
<tx:annotation-driven transaction-manager="transactionManager" />
<bean id="transactionManager"
class="org.springframework.orm.hibernate4.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
The underlying database is PostgreSQL, and here is the current Hibernate configuration:
<hibernate-configuration>
<session-factory>
<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="hbm2ddl.auto">none</property>
<property name="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.SingletonEhCacheRegionFactory</property>
<property name="hibernate.cache.use_second_level_cache">true</property>
<property name="cache.use_query_cache">true</property>
<property name="hibernate.transaction.factory_class">
org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="show_sql">false</property>
<property name="format_sql">true</property>
<property name="use_sql_comments">false</property>
<property name="order_updates">true</property>
</session-factory>
</hibernate-configuration>
The critical area in the code is:
private void fillEmptyNames() throws CablewatchException {
List<Device> devicesList = deviceDao.getDevices();
if (devicesList != null) {
for (Device device : devicesList) {
String name = deviceDao.getDeviceName(device.getModule().getObjectIdentifier(), device.getSubrack(), device.getSlot());
...
}
}
}
The application hangs on the second DAO function, getDeviceName, which is implemented as follows:
@Transactional(timeout=30)
public String getDeviceName(long moduleId, int subrackNr, int slotNr) throws CablewatchException {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(Device.class)
        .add(Restrictions.eq("module.objectIdentifier", moduleId))
        .add(Restrictions.eq("subrack", subrackNr))
        .add(Restrictions.eq("slot", slotNr))
        .addOrder(Order.desc("objectIdentifier"))
        .setMaxResults(1);
List<Device> devicesList = criteria.list();
if (devicesList != null && !devicesList.isEmpty() && devicesList.get(0) instanceof Device) {
Device device = devicesList.get(0);
return device.getName();
}
return null;
}
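As an aside, a hedged sketch of bounding this at the Hibernate level: the Criteria API can pass a JDBC query timeout to the driver, which makes a runaway statement fail fast instead of hanging the thread (the 30 below mirrors the transaction timeout and is in seconds, per the JDBC setQueryTimeout contract):

Criteria criteria = sessionFactory.getCurrentSession().createCriteria(Device.class)
        .add(Restrictions.eq("module.objectIdentifier", moduleId))
        // Ask the JDBC driver to cancel the statement after 30 seconds.
        .setTimeout(30);

Note that a Spring @Transactional timeout alone is not guaranteed to cancel a statement that is already executing on the server.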
One more detail: the same code works fine under Windows, so currently the problem only happens on Linux.
UPDATE:
The generated query is:
select
this_.objectIdentifier as objectId1_0_9_,
this_.ackId as ackId11_0_9_,
this_.alarmInfoId as alarmIn12_0_9_,
this_.cleared as cleared2_0_9_,
this_.clearedTime as clearedT3_0_9_,
this_.logIndex as logIndex4_0_9_,
this_.module as module5_0_9_,
this_.neId as neId13_0_9_,
this_.occurenceTime as occurenc6_0_9_,
this_.serial as serial7_0_9_,
this_.severityId as severit14_0_9_,
this_.slot as slot8_0_9_,
this_.subrack as subrack9_0_9_,
this_.value as value10_0_9_,
acknowledg2_.objectIdentifier as objectId1_2_0_,
acknowledg2_.comment as comment2_2_0_,
acknowledg2_.username as username3_2_0_,
alarminfo3_.objectIdentifier as objectId1_1_1_,
alarminfo3_.cw_alarmMessage as cw_alarm2_1_1_,
alarminfo3_.cw_alarmOid as cw_alarm3_1_1_,
ne4_.OBJECTIDENTIFIER as OBJECTID1_8_2_,
ne4_.cw_neActive as cw_neAct2_8_2_,
ne4_.cw_neCategory as cw_neCat3_8_2_,
ne4_.cw_neFirmware as cw_neFir4_8_2_,
ne4_.cw_neHasWebInterface as cw_neHas5_8_2_,
ne4_.cw_neInetAddress as cw_neIne6_8_2_,
ne4_.cw_neInfo as cw_neInf7_8_2_,
ne4_.cw_neMacAddress as cw_neMac8_8_2_,
ne4_.cw_neModel as cw_neMod9_8_2_,
ne4_.cw_neName as cw_neNa10_8_2_,
ne4_.cw_neSerial as cw_neSe11_8_2_,
ne4_.cw_neSysDescription as cw_neSy12_8_2_,
ne4_.cw_neType as cw_neTy13_8_2_,
ne4_.cw_installationDate as cw_inst14_8_2_,
ne4_.cw_instance as cw_inst15_8_2_,
ne4_.cw_lastAlarmLogIndex as cw_last16_8_2_,
ne4_.cw_locationId as cw_loca19_8_2_,
ne4_.cw_readCommunity as cw_read17_8_2_,
ne4_.cw_severityId as cw_seve20_8_2_,
ne4_.cw_writeCommunity as cw_writ18_8_2_,
location5_.objectIdentifier as objectId1_5_3_,
location5_.cw_imageName as cw_image2_5_3_,
location5_.cw_locationCity as cw_locat3_5_3_,
location5_.cw_locationCode as cw_locat4_5_3_,
location5_.cw_locationContact as cw_locat5_5_3_,
location5_.cw_locationDescription1 as cw_locat6_5_3_,
location5_.cw_locationDescription2 as cw_locat7_5_3_,
location5_.cw_locationName as cw_locat8_5_3_,
location5_.cw_locationStreet as cw_locat9_5_3_,
location5_.cw_locationType as cw_loca10_5_3_,
location5_.cw_parentLocationId as cw_pare11_5_3_,
location5_.cw_severityId as cw_seve12_5_3_,
location5_.cw_sublocationSeverityId as cw_subl13_5_3_,
location6_.objectIdentifier as objectId1_5_4_,
location6_.cw_imageName as cw_image2_5_4_,
location6_.cw_locationCity as cw_locat3_5_4_,
location6_.cw_locationCode as cw_locat4_5_4_,
location6_.cw_locationContact as cw_locat5_5_4_,
location6_.cw_locationDescription1 as cw_locat6_5_4_,
location6_.cw_locationDescription2 as cw_locat7_5_4_,
location6_.cw_locationName as cw_locat8_5_4_,
location6_.cw_locationStreet as cw_locat9_5_4_,
location6_.cw_locationType as cw_loca10_5_4_,
location6_.cw_parentLocationId as cw_pare11_5_4_,
location6_.cw_severityId as cw_seve12_5_4_,
location6_.cw_sublocationSeverityId as cw_subl13_5_4_,
severity7_.id as id1_15_5_,
severity7_.cw_severityColor as cw_sever2_15_5_,
severity7_.cw_severityName as cw_sever3_15_5_,
severity8_.id as id1_15_6_,
severity8_.cw_severityColor as cw_sever2_15_6_,
severity8_.cw_severityName as cw_sever3_15_6_,
severity9_.id as id1_15_7_,
severity9_.cw_severityColor as cw_sever2_15_7_,
severity9_.cw_severityName as cw_sever3_15_7_,
severity10_.id as id1_15_8_,
severity10_.cw_severityColor as cw_sever2_15_8_,
severity10_.cw_severityName as cw_sever3_15_8_
from
CW_ALARM this_
left outer join
CW_Acknowledgment acknowledg2_
on this_.ackId=acknowledg2_.objectIdentifier
left outer join
CW_ALARMINFO alarminfo3_
on this_.alarmInfoId=alarminfo3_.objectIdentifier
left outer join
CW_NE ne4_
on this_.neId=ne4_.OBJECTIDENTIFIER
left outer join
CW_LOCATION location5_
on ne4_.cw_locationId=location5_.objectIdentifier
left outer join
CW_LOCATION location6_
on location5_.cw_parentLocationId=location6_.objectIdentifier
left outer join
CW_SEVERITY severity7_
on location6_.cw_severityId=severity7_.id
left outer join
CW_SEVERITY severity8_
on location6_.cw_sublocationSeverityId=severity8_.id
left outer join
CW_SEVERITY severity9_
on ne4_.cw_severityId=severity9_.id
left outer join
CW_SEVERITY severity10_
on this_.severityId=severity10_.id
where
this_.neId=?
and this_.subrack=?
and this_.slot=?
and this_.module<>?
order by
this_.objectIdentifier desc limit ?
I executed it from pgAdmin (and replaced the parameters with their values) and it works fine. Below is the query plan:
"Limit (cost=25819.66..25819.66 rows=1 width=1185)"
" -> Sort (cost=25819.66..25819.66 rows=1 width=1185)"
" Sort Key: this_.objectidentifier"
" -> Nested Loop Left Join (cost=0.00..25819.65 rows=1 width=1185)"
" -> Nested Loop Left Join (cost=0.00..25811.37 rows=1 width=1021)"
" -> Nested Loop Left Join (cost=0.00..25803.09 rows=1 width=857)"
" -> Nested Loop Left Join (cost=0.00..25799.21 rows=1 width=693)"
" -> Nested Loop Left Join (cost=0.00..25795.33 rows=1 width=529)"
" -> Nested Loop Left Join (cost=0.00..25793.45 rows=1 width=464)"
" Join Filter: (ne4_.cw_locationid = location5_.objectidentifier)"
" -> Nested Loop Left Join (cost=0.00..25791.22 rows=1 width=399)"
" Join Filter: (this_.neid = ne4_.objectidentifier)"
" -> Nested Loop Left Join (cost=0.00..25788.76 rows=1 width=225)"
" -> Nested Loop Left Join (cost=0.00..25780.48 rows=1 width=150)"
" Join Filter: (this_.ackid = acknowledg2_.objectidentifier)"
" -> Seq Scan on cw_alarm this_ (cost=0.00..25779.32 rows=1 width=132)"
" Filter: (((module)::text <> ''::text) AND (neid = 471) AND (subrack = (-1)) AND (slot = (-1)))"
" -> Seq Scan on cw_acknowledgment acknowledg2_ (cost=0.00..1.07 rows=7 width=18)"
" -> Index Scan using cw_alarminfo_pkey on cw_alarminfo alarminfo3_ (cost=0.00..8.27 rows=1 width=75)"
" Index Cond: (this_.alarminfoid = objectidentifier)"
" -> Seq Scan on cw_ne ne4_ (cost=0.00..2.45 rows=1 width=174)"
" Filter: (objectidentifier = 471)"
" -> Seq Scan on cw_location location5_ (cost=0.00..2.10 rows=10 width=65)"
" -> Index Scan using cw_location_pkey on cw_location location6_ (cost=0.00..1.87 rows=1 width=65)"
" Index Cond: (location5_.cw_parentlocationid = objectidentifier)"
" -> Index Scan using cw_severity_pkey on cw_severity severity7_ (cost=0.00..3.87 rows=1 width=164)"
" Index Cond: (location6_.cw_severityid = id)"
" -> Index Scan using cw_severity_pkey on cw_severity severity8_ (cost=0.00..3.87 rows=1 width=164)"
" Index Cond: (location6_.cw_sublocationseverityid = id)"
" -> Index Scan using cw_severity_pkey on cw_severity severity9_ (cost=0.00..8.27 rows=1 width=164)"
" Index Cond: (ne4_.cw_severityid = id)"
" -> Index Scan using cw_severity_pkey on cw_severity severity10_ (cost=0.00..8.27 rows=1 width=164)"
" Index Cond: (this_.severityid = id)"
Following up on the Linux/Windows detail, I ran the same test again, but instead of the local postgresql_9.1.13 (Debian) I used a remote postgresql_9.3.5 (Windows) and a freshly installed postgresql_9.1.13 (Windows). Both worked correctly.
I also ran the same code from my Windows system against the remote postgresql_9.1.13 (Debian) and against another machine with a remote postgresql_9.1.15 (Debian). In both cases the problem occurs.
It seems the problem may lie in the Linux builds of postgresql_9.1.x.
Thanks in advance.
After debugging the database with the commands Craig and Clemens suggested (displaying current queries, pg_stat_activity, EXPLAIN ANALYZE on the select, pg_locks, etc.), I found out that it is not an infinite loop: the query accesses more than 20,000 rows, so its running time is simply very long, and the ORM layer stretched that time to several hours. I am working on a small redesign of the database to fix this.
Thanks guys for the support.
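For reference, a sketch of the kind of check that distinguishes a hung backend from one that is merely slow (column names assume PostgreSQL 9.2 or later; on 9.1 the equivalents are procpid and current_query, and there is no state column):

-- List active statements and how long each has been running.
SELECT pid,
       state,
       now() - query_start AS runtime,
       query
FROM pg_stat_activity
ORDER BY runtime DESC;

A steadily growing runtime on a single statement points at a long-running query rather than a blocked one; in the blocked case, pg_locks would show the waiting entries.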

HQL Hibernate registerFunction FUNCTION does not exist

I'm trying to register a custom function in a Hibernate dialect. I found a ton of examples, and I thought I had it - and now it tells me that the function does not exist in my database. Specifically, I'm trying to register a date_add()-based function for MySQL 5.6. I'm using Hibernate 3.6.7.
In hibernate.cfg.xml:
<hibernate-configuration>
<session-factory>
<property name="dialect">com.myPackage.CustomMySQLDialect</property>
</session-factory>
</hibernate-configuration>
In com.myPackage.CustomMySQLDialect:
public class CustomMySQLDialect extends MySQL5InnoDBDialect {
public CustomMySQLDialect() {
super();
registerFunction( "date_add_interval", new SQLFunctionTemplate( Hibernate.DATE, "date_add(?1, interval ?2 ?3)" ) );
}
}
And when I go to call it in the DAOImpl class:
List<User> userList = null;
Query confirmationUser = session.getCurrentSession().createQuery("from InitUser where enabled = 0 and date_add_interval(created_date, 1, day) < now() and end_date is null");
userList = confirmationUser.list(); // <-- this is where it dies
Here's the exception:
Hibernate: select inituser0_.init_user_id as init1_58_, inituser0_.confirmation_date as confirma2_58_, inituser0_.confirmation_sent_date as confirma3_58_, inituser0_.confirmation_token as confirma4_58_, inituser0_.created_date as created5_58_, inituser0_.current_sign_in_date as current6_58_, inituser0_.current_sign_in_ip as current7_58_, inituser0_.email as email58_, inituser0_.enabled as enabled58_, inituser0_.end_date as end10_58_, inituser0_.failed_sign_in_count as failed11_58_, inituser0_.last_sign_in_date as last12_58_, inituser0_.last_sign_in_ip as last13_58_, inituser0_.password as password58_, inituser0_.remember_created_date as remember15_58_, inituser0_.reset_password_confirmation_date as reset16_58_, inituser0_.reset_password_sent_date as reset17_58_, inituser0_.reset_password_token as reset18_58_, inituser0_.sign_in_count as sign19_58_, inituser0_.updated_by as updated20_58_, inituser0_.updated_date as updated21_58_, inituser0_.username as username58_ from InitUser inituser0_ where inituser0_.end_date is null
Hibernate: select inituser0_.init_user_id as init1_58_, inituser0_.confirmation_date as confirma2_58_, inituser0_.confirmation_sent_date as confirma3_58_, inituser0_.confirmation_token as confirma4_58_, inituser0_.created_date as created5_58_, inituser0_.current_sign_in_date as current6_58_, inituser0_.current_sign_in_ip as current7_58_, inituser0_.email as email58_, inituser0_.enabled as enabled58_, inituser0_.end_date as end10_58_, inituser0_.failed_sign_in_count as failed11_58_, inituser0_.last_sign_in_date as last12_58_, inituser0_.last_sign_in_ip as last13_58_, inituser0_.password as password58_, inituser0_.remember_created_date as remember15_58_, inituser0_.reset_password_confirmation_date as reset16_58_, inituser0_.reset_password_sent_date as reset17_58_, inituser0_.reset_password_token as reset18_58_, inituser0_.sign_in_count as sign19_58_, inituser0_.updated_by as updated20_58_, inituser0_.updated_date as updated21_58_, inituser0_.username as username58_ from InitUser inituser0_ where inituser0_.enabled=0 and date_add_interval(inituser0_.created_date, 1, day)<now() and (inituser0_.end_date is null)
WARN : org.hibernate.util.JDBCExceptionReporter - SQL Error: 1305, SQLState: 42000
ERROR: org.hibernate.util.JDBCExceptionReporter - FUNCTION myDBName.date_add_interval does not exist
ERROR: com.somePackage.InitUserDAOImpl - could not execute query
It gets past creating the query, then tries to execute it and complains that it cannot find the function, prefixing the function name with the name of my database.
Is this not being registered correctly? Should I specify myDBName as the database to register? If so, how?
Or am I way off?
In the end, I used Joda-Time for the date calculation and passed the result in pre-calculated. On the one hand, I had to redo a bunch of code to support this. On the other, I have been using it ever since and couldn't be happier.
http://www.joda.org/joda-time/
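A minimal sketch of that approach, assuming the entity property names from the original HQL and a Joda-Time dependency on the classpath; it slots into the DAOImpl snippet shown above:

import org.joda.time.DateTime;

// Compute "created more than one day ago" in Java instead of in HQL,
// so no custom dialect function is required.
DateTime cutoff = new DateTime().minusDays(1);

Query confirmationUser = session.getCurrentSession().createQuery(
        "from InitUser where enabled = 0 and created_date < :cutoff and end_date is null");
confirmationUser.setParameter("cutoff", cutoff.toDate());
List<User> userList = confirmationUser.list();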

[Hive] I get an "ArrayIndexOutOfBoundsException" while querying the Hive database

I often get an "ArrayIndexOutOfBoundsException" while querying the Hive database (on both hive-0.11.0 and hive-0.12.0), though not every time. Here is the error:
java.lang.RuntimeException: Hive Runtime Error while closing operators: java.lang.ArrayIndexOutOfBoundsException: 0
at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.close(ExecReducer.java:313)
at org.apache.hadoop.io.IOUtils.cleanup(IOUtils.java:232)
at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:539)
at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.ArrayIndexOutOfBoundsException: 0
at org.apache.hadoop.hive.ql.exec.persistence.RowContainer.first(RowContainer.java:231)
at org.apache.hadoop.hive.ql.exec.persistence.RowContainer.first(RowContainer.java:74)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genUniqueJoinObject(CommonJoinOperator.java:645)
at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.checkAndGenObject(CommonJoinOperator.java:758)
at org.apache.hadoop.hive.ql.exec.JoinOperator.endGroup(JoinOperator.java:257)
at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.close(ExecReducer.java:298)
... 8 more
Caused by: java.lang.ArrayIndexOutOfBoundsException: 0
at org.apache.hadoop.hive.ql.exec.persistence.RowContainer.first(RowContainer.java:220)
... 13 more
Could someone help me?
Update: here is my query:
Select distinct jabberUseragent.gameID,agentPlayInfo.gameLabel,jabberUseragent.userAgent,CONCAT(CONCAT(CONCAT(triggerUsageStart.generateDate,' '),triggerUsageStart.timezone),CONCAT(' ',triggerUsageStart.generateTime)) as generateDate,(unix_timestamp(CONCAT(CONCAT(triggerUsageStop.generateDate,' '),triggerUsageStop.generateTime)) - unix_timestamp(CONCAT(CONCAT(triggerUsageStart.generateDate,' '),triggerUsageStart.generateTime))) from
(Select gameSession,gameID,userAgent from(Select distinct regexp_extract(t.payload,'playRequestId:(.*), playRequest',1) as gameSession,regexp_extract(t.payload,'gameId:(.*), userAgent:',1) as gameID,regexp_extract(t.payload,', userAgent:(.*), agentLocation',1) as userAgent,payload from (select * from ${hiveconf:DATA_BASE} p where p.dt >= '${hiveconf:LOW_DATE}' and p.dt <= '${hiveconf:UPPER_DATE}') t where CONCAT(t.generatedate,t.generatetime) >= CONCAT('${hiveconf:LOW_DATE}','${hiveconf:LOW_TIME}') and CONCAT(t.generatedate,t.generatetime) <= CONCAT('${hiveconf:UPPER_DATE}','${hiveconf:UPPER_TIME}'))jabberUseragent where jabberUseragent.gameSession!='' and jabberUseragent.userAgent!='') jabberUseragent
join
(Select gameID,gameLabel from(Select distinct regexp_extract(t.payload,'gameId=(.*),gameLabel=.*,configFilePath',1) as gameID,regexp_extract(t.payload,'gameId=.*,gameLabel=(.*),configFilePath',1) as gameLabel,payload from (select * from ${hiveconf:DATA_BASE} p where p.dt >= '${hiveconf:LOW_DATE}' and p.dt <= '${hiveconf:UPPER_DATE}') t where CONCAT(t.generatedate,t.generatetime) >= CONCAT('${hiveconf:LOW_DATE}','${hiveconf:LOW_TIME}') and CONCAT(t.generatedate,t.generatetime) <= CONCAT('${hiveconf:UPPER_DATE}','${hiveconf:UPPER_TIME}'))agentPlayInfo where agentPlayInfo.gameID!='' and agentPlayInfo.gameLabel!='') agentPlayInfo
join
(Select gameSession,generateDate,generateTime,timezone,payload from(Select distinct regexp_extract(t.payload,'GAME_SESSION=.*((.{8})-(.{4})-(.{4})-(.{4})-(.{12}))\" USAGE=\"([\\w\\-\\(\\)\\.]*,){41}9.*\"',1) as gameSession,generateDate,generateTime,timezone,payload from (select * from ${hiveconf:DATA_BASE} p where p.dt >= '${hiveconf:LOW_DATE}' and p.dt <= '${hiveconf:UPPER_DATE}') t where t.payload like '%[e] usage_record%' and CONCAT(t.generatedate,t.generatetime) <= CONCAT('${hiveconf:UPPER_DATE}','${hiveconf:UPPER_TIME}') and CONCAT(t.generatedate,t.generatetime) >= CONCAT('${hiveconf:LOW_DATE}','${hiveconf:LOW_TIME}'))triggerStart where triggerStart.gameSession!='')triggerUsageStart
join
(Select gameSession,generateDate,generateTime,timezone,payload from(Select distinct regexp_extract(t.payload,'GAME_SESSION=.*((.{8})-(.{4})-(.{4})-(.{4})-(.{12}))\" USAGE=\"([\\w\\-\\(\\)\\.]*,){41}[1-5].*\"',1) as gameSession,generateDate,generateTime,timezone,payload from (select * from ${hiveconf:DATA_BASE} p where p.dt >= '${hiveconf:LOW_DATE}' and p.dt <= '${hiveconf:UPPER_DATE}') t where t.payload like '%[e] usage_record%' and CONCAT(t.generatedate,t.generatetime) <= CONCAT('${hiveconf:UPPER_DATE}','${hiveconf:UPPER_TIME}') and CONCAT(t.generatedate,t.generatetime) >= CONCAT('${hiveconf:LOW_DATE}','${hiveconf:LOW_TIME}'))triggerStop where triggerStop.gameSession!='')triggerUsageStop
on (jabberUseragent.gameSession = triggerUsageStart.gameSession and triggerUsageStart.gameSession = triggerUsageStop.gameSession and jabberUseragent.gameID = agentPlayInfo.gameID) order by generateDate
Sorry, I can't share my sample data.
By the way, I also got the following exception before the "ArrayIndexOutOfBoundsException":
javax.jdo.JDODataStoreException: Error executing SQL query "select PARTITIONS.PART_ID from PARTITIONS inner join TBLS on PARTITIONS.TBL_ID = TBLS.TBL_ID inner join DBS on TBLS.DB_ID = DBS.DB_ID where TBLS.TBL_NAME = ? and DBS.NAME = ? and PARTITIONS.PART_NAME in (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)".
at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)
at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:321)
at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:181)
at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:82)
at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByNamesInternal(ObjectStore.java:1717)
at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByNames(ObjectStore.java:1700)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)......
NestedThrowablesStackTrace:
org.postgresql.util.PSQLException: ERROR: relation "partitions" does not exist
at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:1591)
at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:1340)
at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:192)
at org.postgresql.jdbc2.AbstractJdbc2Statement.execute(AbstractJdbc2Statement.java:471)
at org.postgresql.jdbc2.AbstractJdbc2Statement.executeWithFlags(AbstractJdbc2Statement.java:373)
at org.postgresql.jdbc2.AbstractJdbc2Statement.executeQuery(AbstractJdbc2Statement.java:258)......
Based on the information provided, this is the only sensible explanation of your problem.
I have put the method definition here for reference; please go through it to understand.
If you follow the source code closely, there are two places where an ArrayIndexOutOfBoundsException can be thrown:
1. Accessing the array of input splits from the Configuration.
2. Reading a row from the currentReadBlock array (this is mostly not the case for this exception, since its size is greater than 0).
Please check the set of input files for the job, because the InputFormat#getSplits() method returns an array of InputSplit objects, and each InputSplit is then assigned to an individual mapper for processing. Most likely, the exception occurs while accessing this InputSplit[] array.
