I have a table with a column of type JSON in my PostgreSQL DB (9.2). I have a hard time to map this column to a JPA2 Entity field type.
I tried to use String but when I save the entity I get an exception that it can't convert character varying to JSON.
What is the correct value type to use when dealing with a JSON column?
#Entity
public class MyEntity {
private String jsonPayload; // this maps to a json column
public MyEntity() {
}
}
A simple workaround would be to define a text column.
If you're interested, here are a few code snippets to get the Hibernate custom user type in place. First extend the PostgreSQL dialect to tell it about the json type, thanks to Craig Ringer for the JAVA_OBJECT pointer:
import org.hibernate.dialect.PostgreSQL9Dialect;
import java.sql.Types;
/**
 * Wrap default PostgreSQL9Dialect with 'json' type.
 *
 * @author timfulmer
 */
public class JsonPostgreSQLDialect extends PostgreSQL9Dialect {
public JsonPostgreSQLDialect() {
super();
// Render JAVA_OBJECT-typed columns as PostgreSQL's native "json" type in DDL.
this.registerColumnType(Types.JAVA_OBJECT, "json");
}
}
Next implement org.hibernate.usertype.UserType. The implementation below maps String values to the json database type, and vice-versa. Remember Strings are immutable in Java. A more complex implementation could be used to map custom Java beans to JSON stored in the database as well.
package foo;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.usertype.UserType;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
/**
* #author timfulmer
*/
public class StringJsonUserType implements UserType {
/**
* Return the SQL type codes for the columns mapped by this type. The
* codes are defined on <tt>java.sql.Types</tt>.
*
* #return int[] the typecodes
* #see java.sql.Types
*/
#Override
public int[] sqlTypes() {
return new int[] { Types.JAVA_OBJECT};
}
/**
* The class returned by <tt>nullSafeGet()</tt>.
*
* #return Class
*/
#Override
public Class returnedClass() {
return String.class;
}
/**
* Compare two instances of the class mapped by this type for persistence "equality".
* Equality of the persistent state.
*
* #param x
* #param y
* #return boolean
*/
#Override
public boolean equals(Object x, Object y) throws HibernateException {
if( x== null){
return y== null;
}
return x.equals( y);
}
/**
* Get a hashcode for the instance, consistent with persistence "equality"
*/
#Override
public int hashCode(Object x) throws HibernateException {
return x.hashCode();
}
/**
* Retrieve an instance of the mapped class from a JDBC resultset. Implementors
* should handle possibility of null values.
*
* #param rs a JDBC result set
* #param names the column names
* #param session
* #param owner the containing entity #return Object
* #throws org.hibernate.HibernateException
*
* #throws java.sql.SQLException
*/
#Override
public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner) throws HibernateException, SQLException {
if(rs.getString(names[0]) == null){
return null;
}
return rs.getString(names[0]);
}
/**
* Write an instance of the mapped class to a prepared statement. Implementors
* should handle possibility of null values. A multi-column type should be written
* to parameters starting from <tt>index</tt>.
*
* #param st a JDBC prepared statement
* #param value the object to write
* #param index statement parameter index
* #param session
* #throws org.hibernate.HibernateException
*
* #throws java.sql.SQLException
*/
#Override
public void nullSafeSet(PreparedStatement st, Object value, int index, SessionImplementor session) throws HibernateException, SQLException {
if (value == null) {
st.setNull(index, Types.OTHER);
return;
}
st.setObject(index, value, Types.OTHER);
}
/**
* Return a deep copy of the persistent state, stopping at entities and at
* collections. It is not necessary to copy immutable objects, or null
* values, in which case it is safe to simply return the argument.
*
* #param value the object to be cloned, which may be null
* #return Object a copy
*/
#Override
public Object deepCopy(Object value) throws HibernateException {
return value;
}
/**
* Are objects of this type mutable?
*
* #return boolean
*/
#Override
public boolean isMutable() {
return true;
}
/**
* Transform the object into its cacheable representation. At the very least this
* method should perform a deep copy if the type is mutable. That may not be enough
* for some implementations, however; for example, associations must be cached as
* identifier values. (optional operation)
*
* #param value the object to be cached
* #return a cachable representation of the object
* #throws org.hibernate.HibernateException
*
*/
#Override
public Serializable disassemble(Object value) throws HibernateException {
return (String)this.deepCopy( value);
}
/**
* Reconstruct an object from the cacheable representation. At the very least this
* method should perform a deep copy if the type is mutable. (optional operation)
*
* #param cached the object to be cached
* #param owner the owner of the cached object
* #return a reconstructed object from the cachable representation
* #throws org.hibernate.HibernateException
*
*/
#Override
public Object assemble(Serializable cached, Object owner) throws HibernateException {
return this.deepCopy( cached);
}
/**
* During merge, replace the existing (target) value in the entity we are merging to
* with a new (original) value from the detached entity we are merging. For immutable
* objects, or null values, it is safe to simply return the first parameter. For
* mutable objects, it is safe to return a copy of the first parameter. For objects
* with component values, it might make sense to recursively replace component values.
*
* #param original the value from the detached entity being merged
* #param target the value in the managed entity
* #return the value to be merged
*/
#Override
public Object replace(Object original, Object target, Object owner) throws HibernateException {
return original;
}
}
Now all that's left is annotating the entities. Put something like this at the entity's class declaration:
@TypeDefs({@TypeDef(name = "StringJsonObject", typeClass = StringJsonUserType.class)})
Then annotate the property:
#Type(type = "StringJsonObject")
public String getBar() {
return bar;
}
Hibernate will take care of creating the column with json type for you, and handle the mapping back and forth. Inject additional libraries into the user type implementation for more advanced mapping.
Here's a quick sample GitHub project if anyone wants to play around with it:
https://github.com/timfulmer/hibernate-postgres-jsontype
See PgJDBC bug #265.
PostgreSQL is excessively, annoyingly strict about data type conversions. It won't implicitly cast text even to text-like values such as xml and json.
The strictly correct way to solve this problem is to write a custom Hibernate mapping type that uses the JDBC setObject method. This can be a fair bit of hassle, so you might just want to make PostgreSQL less strict by creating a weaker cast.
As noted by @markdsievers in the comments and this blog post, the original solution in this answer bypasses JSON validation. So it's not really what you want. It's safer to write:
-- Validating text -> json cast: json_in actually parses (and therefore
-- validates) the value instead of blindly relabelling it.
CREATE OR REPLACE FUNCTION json_intext(text) RETURNS json AS $$
SELECT json_in($1::cstring);
$$ LANGUAGE SQL IMMUTABLE;
-- AS IMPLICIT lets PostgreSQL apply the cast without an explicit ::json,
-- e.g. when a JDBC driver binds the parameter as text/varchar.
CREATE CAST (text AS json) WITH FUNCTION json_intext(text) AS IMPLICIT;
AS IMPLICIT tells PostgreSQL it can convert without being explicitly told to, allowing things like this to work:
regress=# CREATE TABLE jsontext(x json);
CREATE TABLE
regress=# PREPARE test(text) AS INSERT INTO jsontext(x) VALUES ($1);
PREPARE
regress=# EXECUTE test('{}')
INSERT 0 1
Thanks to @markdsievers for pointing out the issue.
Maven dependency
The first thing you need to do is to set up the following Hibernate Types Maven dependency in your project pom.xml configuration file:
<dependency>
<groupId>com.vladmihalcea</groupId>
<artifactId>hibernate-types-52</artifactId>
<version>${hibernate-types.version}</version>
</dependency>
Domain model
Now, you need to declare the JsonType on either class level or in a package-info.java package-level descriptor, like this:
@TypeDef(name = "json", typeClass = JsonType.class)
And, the entity mapping will look like this:
@Type(type = "json")
@Column(columnDefinition = "jsonb")
private Location location;
If you're using Hibernate 5 or later, then the JSON type is registered automatically by the PostgreSQL92Dialect.
Otherwise, you need to register it yourself:
public class PostgreSQLDialect extends PostgreSQL91Dialect {
public PostgreSQL92Dialect() {
super();
this.registerColumnType( Types.JAVA_OBJECT, "jsonb" );
}
}
In case someone is interested, you can use JPA 2.1 @Convert / @Converter functionality with Hibernate. You would have to use the pgjdbc-ng JDBC driver though. This way you don't have to use any proprietary extensions, dialects and custom types per field.
// JPA 2.1 AttributeConverter: serializes MyCustomClass to its JSON String
// form for the database column, and back. (The original misspelled the type
// as "MuCustomClass", inconsistent with the field declaration below.)
@javax.persistence.Converter
public static class MyCustomConverter implements AttributeConverter<MyCustomClass, String> {

    @Override
    @NotNull
    public String convertToDatabaseColumn(@NotNull MyCustomClass myCustomObject) {
        ...
    }

    @Override
    @NotNull
    public MyCustomClass convertToEntityAttribute(@NotNull String databaseDataAsJSONString) {
        ...
    }
}
...
@Convert(converter = MyCustomConverter.class)
private MyCustomClass attribute;
I tried many methods I found on the Internet, most of them are not working, some of them are too complex. The below one works for me and is much more simple if you don't have that strict requirements for PostgreSQL type validation.
Make PostgreSQL jdbc string type as unspecified, like
<connection-url>
jdbc:postgresql://localhost:test?stringtype=unspecified
</connection-url>
I had a similar problem with Postgres (javax.persistence.PersistenceException: org.hibernate.MappingException: No Dialect mapping for JDBC type: 1111) when executing native queries (via EntityManager) that retrieved json fields in the projection although the Entity class has been annotated with TypeDefs.
The same query translated in HQL was executed without any problem.
To solve this I had to modify JsonPostgreSQLDialect this way:
public class JsonPostgreSQLDialect extends PostgreSQL9Dialect {
public JsonPostgreSQLDialect() {
super();
this.registerColumnType(Types.JAVA_OBJECT, "json");
this.registerHibernateType(Types.OTHER, "myCustomType.StringJsonUserType");
}
Where myCustomType.StringJsonUserType is the class name of the class implementing the json type (from above, Tim Fulmer answer) .
There is an easier way to do this which doesn't involve creating a function, by using WITH INOUT:
CREATE TABLE jsontext(x json);
INSERT INTO jsontext VALUES ($${"a":1}$$::text);
ERROR: column "x" is of type json but expression is of type text
LINE 1: INSERT INTO jsontext VALUES ($${"a":1}$$::text);
CREATE CAST (text AS json)
WITH INOUT
AS ASSIGNMENT;
INSERT INTO jsontext VALUES ($${"a":1}$$::text);
INSERT 0 1
I was running into this and didn't want to enable stuff via connection string, and allow implicit conversions. At first I tried to use #Type, but because I'm using a custom converter to serialize/deserialize a Map to/from JSON, I couldn't apply a #Type annotation. Turns out I just needed to specify columnDefinition = "json" in my #Column annotation.
@Convert(converter = HashMapConverter.class)
@Column(name = "extra_fields", columnDefinition = "json")
private Map<String, String> extraFields;
None of the above solutions worked for me. Finally I made use of native queries to insert the data.
Step -1 Create an abstract class AbstractEntity which will implements Persistable
with the annotation @MappedSuperclass (part of javax.persistence)
Step 2 - In this class create your sequence generator, because you cannot generate a sequence with native queries: @Id @GeneratedValue @Column private Long seqid;
Don't forget - your entity class should extend your abstract class, so your sequence works on insertion as well.
Step- 3 In repo interface write the native query.
value = "INSERT INTO table(seqid, json_column) VALUES (:seqid, cast(:jsonString as json))", nativeQuery = true
Step - 4 This will convert your java string object to json and insert/store in database and also you will be able to increment the sequence on each insertion as well.
I got casting error when I worked using converter. Also type-52 personally I avoided to use that in my project.
Please upvote my answer if it works for you.
I ran into this issue when I migrated my projects from MySQL 8.0.21 to Postgres 13. My project uses Spring boot with the Hibernate types dependency version 2.7.1. In my case the solution was simple.
All I needed to do was change that and it worked.
Referenced from the Hibernate Types Documentation page.
I encountered the column "roles" is of type json but expression is of type character varying exception with the following entity with Postgres:
#Entity
#TypeDefs(#TypeDef(name = "json", typeClass = JsonBinaryType.class))
#Data
#AllArgsConstructor
#NoArgsConstructor
#Builder
#EqualsAndHashCode(of = "extId")
public class ManualTaskUser {
#Id
private String extId;
#Type(type = "json")
#Column(columnDefinition = "json")
private Set<Role> roles;
}
It should be mentioned that Role is an enum and not a POJO.
In the generated SQL I could see that the Set was correctly serialized like this: ["SYSTEM","JOURNEY","ADMIN","OBJECTION","DEVOPS","ASSESSMENT"].
Changing the typeClass in the TypeDef annotation from JsonStringType to JsonBinaryType solved the problem! Thanks to Joseph Waweru for the hint!
Related
After going through some tutorials and initial document reading from the docs.spring.org reference I understood that it is created in the controller of a POJO class created by the developer.
But while reading this I came across the paragraph below:
An #ModelAttribute on a method argument indicates the argument should be retrieved from the model. If not present in the model, the argument should be instantiated first and then added to the model. Once present in the model, the argument's fields should be populated from all request parameters that have matching names. This is known as data binding in Spring MVC, a very useful mechanism that saves you from having to parse each form field individually.
#RequestMapping(value="/owners/{ownerId}/pets/{petId}/edit", method = RequestMethod.POST)
public String processSubmit(#ModelAttribute Pet pet) {
}
Spring Documentation
In the paragraph what is most disturbing is the line:
"If not present in the model ... "
How can the data be there in the model? (Because we have not created a model - it will be created by us.)
Also, I have seen a few controller methods accepting the Model type as an argument. What does that mean? Is it getting the Model created somewhere? If so who is creating it for us?
If not present in the model, the argument should be instantiated first and then added to the model.
The paragraph describes the following piece of code:
if (mavContainer.containsAttribute(name)) {
attribute = mavContainer.getModel().get(name);
} else {
// Create attribute instance
try {
attribute = createAttribute(name, parameter, binderFactory, webRequest);
}
catch (BindException ex) {
...
}
}
...
mavContainer.addAllAttributes(attribute);
(taken from ModelAttributeMethodProcessor#resolveArgument)
For every request, Spring initialises a ModelAndViewContainer instance which records model and view-related decisions made by HandlerMethodArgumentResolvers and HandlerMethodReturnValueHandlers during the course of invocation of a controller method.
A newly-created ModelAndViewContainer object is initially populated with flash attributes (if any):
ModelAndViewContainer mavContainer = new ModelAndViewContainer();
mavContainer.addAllAttributes(RequestContextUtils.getInputFlashMap(request));
It means that the argument won't be initialised if it already exists in the model.
To prove it, let's move to a practical example.
The Pet class:
public class Pet {
// Populated by Spring MVC data binding from URI template variables or
// request parameters with matching names.
private String petId;
private String ownerId;
// Never bound from the request in this demo; set manually to show that the
// instance survives a redirect via flash attributes.
private String hiddenField;
public Pet() {
// Printed so the demo can show exactly when Spring creates a fresh Pet.
System.out.println("A new Pet instance was created!");
}
// setters and toString
}
The PetController class:
#RestController
public class PetController {
#GetMapping(value = "/internal")
public void invokeInternal(#ModelAttribute Pet pet) {
System.out.println(pet);
}
#PostMapping(value = "/owners/{ownerId}/pets/{petId}/edit")
public RedirectView editPet(#ModelAttribute Pet pet, RedirectAttributes attributes) {
System.out.println(pet);
pet.setHiddenField("XXX");
attributes.addFlashAttribute("pet", pet);
return new RedirectView("/internal");
}
}
Let's make a POST request to the URI /owners/123/pets/456/edit and see the results:
A new Pet instance was created!
Pet[456,123,null]
Pet[456,123,XXX]
A new Pet instance was created!
Spring created a ModelAndViewContainer and didn't find anything to fill the instance with (it's a request from a client; there weren't any redirects). Since the model is empty, Spring had to create a new Pet object by invoking the default constructor which printed the line.
Pet[456,123,null]
Once present in the model, the argument's fields should be populated from all request parameters that have matching names.
We printed the given Pet to make sure all the fields petId and ownerId had been bound correctly.
Pet[456,123,XXX]
We set hiddenField to check our theory and redirected to the method invokeInternal which also expects a #ModelAttribute. As we see, the second method received the instance (with own hidden value) which was created for the first method.
To answer the question, I found a few snippets of code with the help of @andrew's answer, which justify that a ModelMap instance (a model object) is created well before our controller/handler is called for a specific URL.
public class ModelAndViewContainer {
private boolean ignoreDefaultModelOnRedirect = false;
@Nullable
private Object view;
private final ModelMap defaultModel = new BindingAwareModelMap();
....
.....
}
If we see the above snippet code (taken from spring-webmvc-5.0.8 jar). BindingAwareModelMap model object is created well before.
For better Understanding adding the comments for the class BindingAwareModelMap
/**
 * Subclass of {@link org.springframework.ui.ExtendedModelMap} that automatically removes
 * a {@link org.springframework.validation.BindingResult} object if the corresponding
 * target attribute gets replaced through regular {@link Map} operations.
 *
 * <p>This is the class exposed to handler methods by Spring MVC, typically consumed through
 * a declaration of the {@link org.springframework.ui.Model} interface. There is no need to
 * build it within user code; a plain {@link org.springframework.ui.ModelMap} or even just
 * a regular {@link Map} with String keys will be good enough to return a user model.
 */
@SuppressWarnings("serial")
public class BindingAwareModelMap extends ExtendedModelMap {
....
....
}
In my db entities i have byte[] fields:
import javax.persistence.*;
/**
* Account
*/
@Entity
@Table(name = TABLE)
public class Account {
public static final String TABLE = "Account";
...
public final static String COLUMN_PASSWORD_HASH = "passwordHash";
@Column(name = COLUMN_PASSWORD_HASH, nullable = false)
public byte[] passwordHash;
...
I want to keep my db entities clear of any vendor dependency so i use only JPA annotations and try to avoid any ORMLite or Hibernate annotations.
However when trying to save such entity with ORMLite i get the following error:
java.sql.SQLException: ORMLite does not know how to store class [B for field 'passwordHash'. byte[] fields must specify dataType=DataType.BYTE_ARRAY or SERIALIZABLE
As far as i understand for some reason ORMLite does not prefer BYTE_ARRAY for byte[] and requires to mark the fields with com.j256.ormlite.field.Datatype ORMLite annotation with introduces explicit dependency on ormlite-core module and this is what i want to avoid (i have Hibernate DAO impl and ORMLite DAO impl and i don't want to mix everything).
My original intention was to configure ORMLite to prefer BYTE_ARRAY for byte[] fields. How can i do it? Should i introduce custom persister? Any other suggestions?
I solved it by adding the following custom data persister (without adding dependency to ormlite-core as i wanted):
package name.antonsmirnov.zzz.dao.types;
import com.j256.ormlite.field.SqlType;
import com.j256.ormlite.field.types.ByteArrayType;
/**
 * ORMLite data persister that makes raw byte[] fields default to
 * SqlType.BYTE_ARRAY, avoiding a per-field DataType annotation.
 */
public class PreferByteArrayType extends ByteArrayType {

    private static final PreferByteArrayType INSTANCE = new PreferByteArrayType();

    public PreferByteArrayType() {
        super(SqlType.BYTE_ARRAY, new Class[] { byte[].class });
    }

    /** @return the shared instance to pass to DataPersisterManager. */
    public static PreferByteArrayType getSingleton() {
        return INSTANCE;
    }
}
Register it just like any other custom persister:
DataPersisterManager.registerDataPersisters(PreferByteArrayType.getSingleton());
Note you can't use default ByteArrayDataType because it has empty classes array so it causes it to become persister for autogenerated fields and it throws exception that byte array fields can't be id fields.
I've checked it to use BLOB fields type for MySQL:
com.mysql.jdbc.Field#39a2bb97[catalog=test_db,tableName=account,originalTableName=account,columnName=passwordHash,originalColumnName=passwordHash,mysqlType=252(FIELD_TYPE_BLOB),flags= BINARY BLOB, charsetIndex=63, charsetName=ISO-8859-1]
Creating a restful api for a web application in Spring is pretty easy.
Let's say we have a Movie entity, with a name, year, list of genres and list of actors. In order to return a list of all movies in json format, we just create a method in some controller that will query a database and return a list as a body of ResponseEntity. Spring will magically serialize it, and all works great :)
But, what if I, in some case, want that list of actors in a movie to be serialized, and not in other? And in some other case, alongside the fields of the movie class, I need to add some other properties, for each movie in the list, which values are dynamically generated?
My current solution is to use #JsonIgnore on some fields or to create a MovieResponse class with fields like in Movie class and additional fields that are needed, and to convert from Movie to MovieResponse class each time.
Is there a better way to do this?
The point of the JSONIgnore annotation is to tell the DispatcherServlet (or whatever component in Spring handles rendering the response) to ignore certain fields if those fields are null or otherwise omitted.
This can provide you with some flexibility in terms of what data you expose to the client in certain cases.
Downside to JSONIgnore:
However, there are some downsides to using this annotation that I've recently encountered in my own projects. This applies mainly to the PUT method and cases where the object that your controller serializes data to is the same object that is used to store that data in the database.
The PUT method implies that you're either creating a new collection on the server or are replacing a collection on the server with the new collection you're updating.
Example of Replacing a Collection on the server:
Imagine that you're making a PUT request to your server, and the RequestBody contains a serialized Movie entity, but this Movie entity contains no actors because you've omitted them! Later on down the road, you implement a new feature that allows your users to edit and correct spelling errors in the Movie description, and you use PUT to send the Movie entity back to the server, and you update the database.
But, let's say that -- because it's been so long since you added JSONIgnore to your objects -- you've forgotten that certain fields are optional. In the client side, you forget to include the collection of actors, and now your user accidentally overwrites Movie A with actors B, C, and D, with Movie A with no actors whatsoever!
Why is JSONIgnore opt-in?
It stands to reason that the intention behind forcing you to opt-out of making certain fields required is precisely so that these types of data integrity issues are avoided. In a world where you're not using JSONIgnore, you guarantee that your data can never be replaced with partial data unless you explicitly set that data yourself. With JSONIgnore, you remove these safeguards.
With that said, JSONIgnore is very valuable, and I use it myself in precisely the same manner to reduce the size of the payload sent to the client. However, I'm beginning to rethink this strategy and instead opt for one where I use POJO classes in a separate layer for sending data to the frontend than what I use to interact with the database.
Possible Better Setup?:
The ideal setup -- from my experience dealing with this particular problem -- is to use Constructor injection for your Entity objects instead of setters. Force yourself to have to pass in every parameter at instantiation time so that your entities are never partially filled. If you try to partially fill them, the compiler stops you from doing something you may regret.
For sending data to the client side, where you may want to omit certain pieces of data, you could use a separate, disconnected entity POJO, or use a JSONObject from org.json.
When sending data from the client to the server, your frontend entity objects receive the data from the model database layer, partially or full, since you don't really care if the frontend gets partial data. But then when storing the data in the datastore, you would first fetch the already-stored object from the datastore, update its properties, and then store it back in the datastore. In other words, if you were missing the actors, it wouldn't matter because the object you're updating from the datastore already has the actors assigned to it's properties. Thus, you only replace the fields that you explicitly intend to replace.
While there would be more maintenance overhead and complexity to this setup, you would gain a powerful advantage: The Java compiler would have your back! It won't let you or even a hapless colleague do anything in the code that might compromise the data in the datastore. If you attempt to create an entity on the fly in your model layer, you'll be forced to use the constructor, and forced to provide all of the data. If you don't have all of the data and cannot instantiate the object, then you'll either need to pass empty values (which should signal a red flag to you) or fetch that data from the datastore first.
I ran into this problem, and really wanted to keep using #JsonIgnore, but also use the entities/POJO's to use in the JSON calls.
After a lot of digging I came up with the solution of automatically retrieving the ignored fields from the database, on every call of the object mapper.
Ofcourse there are some requirements which are needed for this solution. Like you have to use the repository, but in my case this works just the way I need it.
For this to work you need to make sure the ObjectMapper in MappingJackson2HttpMessageConverter is intercepted and the fields marked with #JsonIgnore are filled. Therefore we need our own MappingJackson2HttpMessageConverter bean:
public class MvcConfig extends WebMvcConfigurerAdapter {
#Override
public void extendMessageConverters(List<HttpMessageConverter<?>> converters) {
for (HttpMessageConverter converter : converters) {
if (converter instanceof MappingJackson2HttpMessageConverter) {
((MappingJackson2HttpMessageConverter)converter).setObjectMapper(objectMapper());
}
}
}
#Bean
public ObjectMapper objectMapper() {
ObjectMapper objectMapper = new FillIgnoredFieldsObjectMapper();
Jackson2ObjectMapperBuilder.json().configure(objectMapper);
return objectMapper;
}
}
Each JSON request is than converted into an object by our own objectMapper, which fills the ignored fields by retrieving them from the repository:
/**
 * Created by Sander Agricola on 18-3-2015.
 *
 * When fields or setters are marked as @JsonIgnore, the field is not read from the JSON and thus left empty in the object.
 * When the object is a persisted entity it might get stored without these fields, overwriting the properties
 * which were set in previous calls.
 *
 * To overcome this, entities with ignored fields are detected. The same object is then retrieved from the
 * repository and all ignored fields are copied from the database object to the new object.
 */
@Component
public class FillIgnoredFieldsObjectMapper extends ObjectMapper {
final static Logger logger = LoggerFactory.getLogger(FillIgnoredFieldsObjectMapper.class);
@Autowired
ListableBeanFactory listableBeanFactory;
#Override
protected Object _readValue(DeserializationConfig cfg, JsonParser jp, JavaType valueType) throws IOException, JsonParseException, JsonMappingException {
Object result = super._readValue(cfg, jp, valueType);
fillIgnoredFields(result);
return result;
}
#Override
protected Object _readMapAndClose(JsonParser jp, JavaType valueType) throws IOException, JsonParseException, JsonMappingException {
Object result = super._readMapAndClose(jp, valueType);
fillIgnoredFields(result);
return result;
}
/**
 * Find all ignored fields in the object, and fill them with the value as it is in the database.
 * Bails out early when the object is not an entity, has no ignored fields,
 * has no (populated) id, or no repository can be found for it.
 * @param resultObject Object as it was deserialized from the JSON values
 */
public void fillIgnoredFields(Object resultObject) {
Class c = resultObject.getClass();
if (!objectIsPersistedEntity(c)) {
return;
}
List ignoredFields = findIgnoredFields(c);
if (ignoredFields.isEmpty()) {
return;
}
Field idField = findIdField(c);
// A missing or null id means the entity is new: nothing persisted to copy.
if (idField == null || getValue(resultObject, idField) == null) {
return;
}
CrudRepository repository = findRepositoryForClass(c);
if (repository == null) {
return;
}
//All lights are green: fill the ignored fields with the persisted values
fillIgnoredFields(resultObject, ignoredFields, idField, repository);
}
/**
 * Fill the ignored fields with the persisted values
 *
 * @param object Object as it was deserialized from the JSON values
 * @param ignoredFields List with fields which are marked as JsonIgnore
 * @param idField The id field of the entity
 * @param repository The repository for the entity
 */
private void fillIgnoredFields(Object object, List ignoredFields, Field idField, CrudRepository repository) {
logger.debug("Object {} contains fields with #JsonIgnore annotations, retrieving their value from database", object.getClass().getName());
try {
Object storedObject = getStoredObject(getValue(object, idField), repository);
if (storedObject == null) {
return;
}
for (Field field : ignoredFields) {
// Copy the persisted value into the field JSON deserialization left empty.
field.set(object, getValue(storedObject, field));
}
} catch (IllegalAccessException e) {
logger.error("Unable to fill ignored fields", e);
}
}
/**
* Get the persisted object from database.
*
* #param id The id of the object (most of the time an int or string)
* #param repository The The repository for the entity
* #return The object as it is in the database
* #throws IllegalAccessException
*/
#SuppressWarnings("unchecked")
private Object getStoredObject(Object id, CrudRepository repository) throws IllegalAccessException {
return repository.findOne((Serializable)id);
}
/**
 * Get the value of a field for an object
 *
 * @param object Object with values
 * @param field The field we want to retrieve
 * @return The value of the field in the object, or null when inaccessible
 */
private Object getValue(Object object, Field field) {
try {
// Private fields must be made accessible before reflective access.
field.setAccessible(true);
return field.get(object);
} catch (IllegalAccessException e) {
logger.error("Unable to access field value", e);
return null;
}
}
/**
 * Test if the object is a persisted entity
 * @param c The class of the object
 * @return true when it has an {@code @Entity} annotation
 */
private boolean objectIsPersistedEntity(Class c) {
return c.isAnnotationPresent(Entity.class);
}
/**
 * Find the right repository for the class. Needed to retrieve the persisted object from database
 *
 * @param c The class of the object
 * @return The (Crud)repository for the class.
 */
private CrudRepository findRepositoryForClass(Class c) {
// Spring Data's Repositories helper resolves the repository bean by entity class.
return (CrudRepository)new Repositories(listableBeanFactory).getRepositoryFor(c);
}
/**
 * Find the Id field of the object; the Id field is the field carrying the
 * @Id annotation.
 *
 * @param c The class of the object
 * @return the id field, or {@code null} when the class declares none
 */
private Field findIdField(Class c) {
    Field idField = null;
    for (Field candidate : c.getDeclaredFields()) {
        if (candidate.isAnnotationPresent(Id.class)) {
            idField = candidate;
            break;
        }
    }
    return idField;
}
/**
 * Find a list of all fields which are ignored by json.
 * In some cases the field itself is not ignored, but the setter is. In this
 * case this field is also returned.
 *
 * @param c The class of the object
 * @return List with ignored fields
 */
private List<Field> findIgnoredFields(Class c) {
    // Generified: the raw List forced unchecked access at every call site.
    List<Field> ignoredFields = new ArrayList<>();
    for (Field field : c.getDeclaredFields()) {
        // Test if the field is ignored, or the setter is ignored.
        // When the field is ignored it might be overridden by the setter
        // (by adding @JsonProperty to the setter).
        if (fieldIsIgnored(field) ? setterDoesNotOverrideIgnore(field) : setterIsIgnored(field)) {
            ignoredFields.add(field);
        }
    }
    return ignoredFields;
}
/**
 * @param field the field to inspect
 * @return true when the field itself carries @JsonIgnore
 */
private boolean fieldIsIgnored(Field field) {
    return field.getAnnotation(JsonIgnore.class) != null;
}
/**
 * @param field the field to inspect
 * @return true when the field's setter carries @JsonIgnore
 */
private boolean setterIsIgnored(Field field) {
    boolean ignoredAtSetter = annotationPresentAtSetter(field, JsonIgnore.class);
    return ignoredAtSetter;
}
/**
 * @param field the field to inspect
 * @return true when the setter does NOT carry @JsonProperty, i.e. it does not
 *         override the @JsonIgnore placed on the field itself
 */
private boolean setterDoesNotOverrideIgnore(Field field) {
    boolean overriddenBySetter = annotationPresentAtSetter(field, JsonProperty.class);
    return !overriddenBySetter;
}
/**
 * Test if an annotation is present at the setter of a field.
 *
 * @param field The field whose setter is inspected
 * @param annotation The annotation type being looked for
 * @return true when the annotation is present on the setter
 */
private boolean annotationPresentAtSetter(Field field, Class<? extends java.lang.annotation.Annotation> annotation) {
    // Bounded type replaces the raw Class: isAnnotationPresent requires
    // Class<? extends Annotation>, and all callers pass annotation literals.
    try {
        Method setter = getSetterForField(field);
        return setter.isAnnotationPresent(annotation);
    } catch (NoSuchMethodException e) {
        // No setter exists for this field, so no setter-level annotation either.
        return false;
    }
}
/**
* Get the setter for the field. The setter is found based on the name with "set" in front of it.
* The type of the field must be the only parameter for the method
*
* #param field The field we want to retrieve
* #return Setter for the field
* #throws NoSuchMethodException
*/
#SuppressWarnings("unchecked")
private Method getSetterForField(Field field) throws NoSuchMethodException {
Class c = field.getDeclaringClass();
return c.getDeclaredMethod(getSetterName(field.getName()), field.getType());
}
/**
 * Build the setter name for a fieldName.
 * The setter name is "set" followed by the field name with its first
 * character upper-cased.
 *
 * @param fieldName The name of the field
 * @return The name of the setter
 */
private String getSetterName(String fieldName) {
    // Equivalent to String.format("set%C%s", ...): the %C conversion
    // upper-cases via String.toUpperCase with the default locale.
    String head = String.valueOf(fieldName.charAt(0)).toUpperCase();
    return "set" + head + fieldName.substring(1);
}
}
Maybe not the most clean solution in all cases, but in my case it does the trick just the way I want it to work.
Situation:
I have a persistable class with variable of java.util.Date type:
import java.util.Date;
#Entity
#Table(name = "prd_period")
#Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
public class Period extends ManagedEntity implements Interval {
#Column(name = "startdate_", nullable = false)
private Date startDate;
}
Corresponding table in DB:
CREATE TABLE `prd_period` (
`id_` bigint(20) NOT NULL AUTO_INCREMENT,
...
`startdate_` datetime NOT NULL
)
Then I save my Period object to DB:
Period p = new Period();
Date d = new Date();
p.setStartDate(d);
myDao.save(p);
After then if I'm trying to extract my object from DB, it is returned with variable startDate of Timestamp type - and all the places where I'm trying to use equals(...) are returning false.
Question: are there any means to force Hibernate to return dates as object of java.util.Date type instead of Timestamp without explicit modification of every such variable (e.g it must be able just work, without explicit modification of existed variables of java.util.Date type)?
NOTE:
I found number of explicit solutions, where annotations are used or setter is modified - but I have many classes with Date-variables - so I need some centralized solution and all that described below is not good enough:
Using annotation #Type: - java.sql.Date will be returned
@Column
@Type(type="date")
private Date startDate;
Using annotation #Temporal(TemporalType.DATE) - java.sql.Date will be returned
@Temporal(TemporalType.DATE)
@Column(name="CREATION_DATE")
private Date startDate;
By modifying setter (deep copy) - java.util.Date will be returned
public void setStartDate(Date startDate) {
if (startDate != null) {
this.startDate = new Date(startDate.getTime());
} else {
this.startDate = null;
}
}
By creation of my own type: - java.util.Date will be returned
Details are given here:
http://blogs.sourceallies.com/2012/02/hibernate-date-vs-timestamp/
So, I spent some time with this issue and found a solution. It is not pretty one, but at least a start point - maybe someone will supplement this with some useful comments.
Some info about mapping that I found in process:
Class that contains basic mapping of Hibernate types to property types is org.hibernate.type.TypeFactory. All this mappings are stored in unmodifiable map
private static final Map BASIC_TYPES;
...
basics.put( java.util.Date.class.getName(), Hibernate.TIMESTAMP );
...
BASIC_TYPES = Collections.unmodifiableMap( basics );
As you can see the java.util.Date type is associated with the Hibernate type org.hibernate.type.TimestampType
Next interesting moment - creation of Hibernate org.hibernate.cfg.Configuration - object that contains all info about mapped classes. This classes and their properties can be extracted like this:
Iterator clsMappings = cfg.getClassMappings();
while(clsMappings.hasNext()){
PersistentClass mapping = (PersistentClass) clsMappings.next();
handleProperties(mapping.getPropertyIterator(), map);
}
Vast majority of properties are the objects of org.hibernate.mapping.SimpleValue types. Our point of interest is the method SimpleValue.getType() - in this method is defined what type will be used to convert properties values back-and-forth while working with DB
Type result = TypeFactory.heuristicType(typeName, typeParameters);
At this point I understand that I am unable to modify BASIC_TYPES - so the only way - to replace SimpleValue object to the properties of java.util.Date types to my custom Object that will be able to know the exact type to convert.
The solution:
Create custom container entity manager factory by extending HibernatePersistence class and overriding its method createContainerEntityManagerFactory:
public class HibernatePersistenceExtensions extends HibernatePersistence {
#Override
public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map map) {
if ("true".equals(map.get("hibernate.use.custom.entity.manager.factory"))) {
return CustomeEntityManagerFactoryFactory.createCustomEntityManagerFactory(info, map);
} else {
return super.createContainerEntityManagerFactory(info, map);
}
}
}
Create Hibernate configuration object, modify value ojects for java.util.Date properties and then create custom entity manager factory.
public class ReattachingEntityManagerFactoryFactory {
#SuppressWarnings("rawtypes")
public static EntityManagerFactory createContainerEntityManagerFactory(
PersistenceUnitInfo info, Map map) {
Ejb3Configuration cfg = new Ejb3Configuration();
Ejb3Configuration configured = cfg.configure( info, map );
handleClassMappings(cfg, map);
return configured != null ? configured.buildEntityManagerFactory() : null;
}
#SuppressWarnings("rawtypes")
private static void handleClassMappings(Ejb3Configuration cfg, Map map) {
Iterator clsMappings = cfg.getClassMappings();
while(clsMappings.hasNext()){
PersistentClass mapping = (PersistentClass) clsMappings.next();
handleProperties(mapping.getPropertyIterator(), map);
}
}
private static void handleProperties(Iterator props, Map map) {
while(props.hasNext()){
Property prop = (Property) props.next();
Value value = prop.getValue();
if (value instanceof Component) {
Component c = (Component) value;
handleProperties(c.getPropertyIterator(), map);
} else {
handleReturnUtilDateInsteadOfTimestamp(prop, map);
}
}
private static void handleReturnUtilDateInsteadOfTimestamp(Property prop, Map map) {
if ("true".equals(map.get("hibernate.return.date.instead.of.timestamp"))) {
Value value = prop.getValue();
if (value instanceof SimpleValue) {
SimpleValue simpleValue = (SimpleValue) value;
String typeName = simpleValue.getTypeName();
if ("java.util.Date".equals(typeName)) {
UtilDateSimpleValue udsv = new UtilDateSimpleValue(simpleValue);
prop.setValue(udsv);
}
}
}
}
}
As you can see I just iterate over every property and substitute SimpleValue-object for UtilDateSimpleValue for properties of type java.util.Date. This is very simple class - it implements the same interface as SimpleValue object, e.g org.hibernate.mapping.KeyValue. In constructor original SimpleValue object is passed - so every call to UtilDateSimpleValue is redirected to the original object with one exception - method getType(...) return my custom Type.
public class UtilDateSimpleValue implements KeyValue{
private SimpleValue value;
public UtilDateSimpleValue(SimpleValue value) {
this.value = value;
}
public SimpleValue getValue() {
return value;
}
#Override
public int getColumnSpan() {
return value.getColumnSpan();
}
...
#Override
public Type getType() throws MappingException {
final String typeName = value.getTypeName();
if (typeName == null) {
throw new MappingException("No type name");
}
Type result = new UtilDateUserType();
return result;
}
...
}
And the last step is implementation of UtilDateUserType. I just extend original org.hibernate.type.TimestampType and override its method get() like this:
public class UtilDateUserType extends TimestampType{
#Override
public Object get(ResultSet rs, String name) throws SQLException {
Timestamp ts = rs.getTimestamp(name);
Date result = null;
if(ts != null){
result = new Date(ts.getTime());
}
return result;
}
}
That is all. A little bit tricky, but now every java.util.Date property is returned as java.util.Date without any additional modifications of existing code (annotations or modifying setters). As I find out in Hibernate 4 or above there is a much more easier way to substitute your own type (see details here: Hibernate TypeResolver). Any suggestions or criticism are welcome.
A simple alternative to using a custom UserType is to construct a new java.util.Date in the setter for the date property in your persisted bean, eg:
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.Column;
#Entity
public class Purchase {
private Date date;
#Column
public Date getDate() {
return this.date;
}
public void setDate(Date date) {
// force java.sql.Timestamp to be set as a java.util.Date
this.date = new Date(date.getTime());
}
}
Approaches 1 and 2 obviously don't work, because you get java.sql.Date objects, per JPA/Hibernate spec, and not java.util.Date. From approaches 3 and 4, I would rather choose the latter one, because it's more declarative, and will work with both field and getter annotations.
You have already laid out the solution 4 in your referenced blog post, as @tscho was kind to point out. Maybe defaultForType (see below) should give you the centralized solution you were looking for. Of course you will still need to differentiate between date (without time) and timestamp fields.
For future reference I will leave the summary of using your own Hibernate UserType here:
To make Hibernate give you java.util.Date instances, you can use the #Type and #TypeDef annotations to define a different mapping of your java.util.Date java types to and from the database.
See the examples in the core reference manual here.
Implement a UserType to do the actual plumbing (conversion to/from java.util.Date), named e.g. TimestampAsJavaUtilDateType
Add a #TypeDef annotation on one entity or in a package-info.java - both will be available globally for the session factory (see manual link above). You can use defaultForType to apply the type conversion on all mapped fields of type java.util.Date.
@TypeDef(
name = "timestampAsJavaUtilDate",
defaultForType = java.util.Date.class, /* applied globally */
typeClass = TimestampAsJavaUtilDateType.class
)
Optionally, instead of defaultForType, you can annotate your fields/getters with #Type individually:
@Entity
public class MyEntity {
[...]
@Type(type="timestampAsJavaUtilDate")
private java.util.Date myDate;
[...]
}
P.S. To suggest a totally different approach: we usually just don't compare Date objects using equals() anyway. Instead we use a utility class with methods to compare e.g. only the calendar date of two Date instances (or another resolution such as seconds), regardless of the exact implementation type. That as worked well for us.
Here is solution for Hibernate 4.3.7.Final.
package-info.java contains
@TypeDefs(
{
@TypeDef(
name = "javaUtilDateType",
defaultForType = java.util.Date.class,
typeClass = JavaUtilDateType.class
)
})
package some.pack;
import org.hibernate.annotations.TypeDef;
import org.hibernate.annotations.TypeDefs;
And JavaUtilDateType:
package some.other.or.same.pack;
import java.sql.Timestamp;
import java.util.Comparator;
import java.util.Date;
import org.hibernate.HibernateException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.LiteralType;
import org.hibernate.type.StringType;
import org.hibernate.type.TimestampType;
import org.hibernate.type.VersionType;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JdbcTimestampTypeDescriptor;
import org.hibernate.type.descriptor.sql.TimestampTypeDescriptor;
/**
* Note: Depends on hibernate implementation details hibernate-core-4.3.7.Final.
*
* #see
* <a href="http://docs.jboss.org/hibernate/orm/4.3/manual/en-US/html/ch06.html#types-custom">Hibernate
* Documentation</a>
* #see TimestampType
*/
public class JavaUtilDateType
extends AbstractSingleColumnStandardBasicType<Date>
implements VersionType<Date>, LiteralType<Date> {
public static final TimestampType INSTANCE = new TimestampType();
public JavaUtilDateType() {
super(
TimestampTypeDescriptor.INSTANCE,
new JdbcTimestampTypeDescriptor() {
#Override
public Date fromString(String string) {
return new Date(super.fromString(string).getTime());
}
#Override
public <X> Date wrap(X value, WrapperOptions options) {
return new Date(super.wrap(value, options).getTime());
}
}
);
}
#Override
public String getName() {
return "timestamp";
}
#Override
public String[] getRegistrationKeys() {
return new String[]{getName(), Timestamp.class.getName(), java.util.Date.class.getName()};
}
#Override
public Date next(Date current, SessionImplementor session) {
return seed(session);
}
#Override
public Date seed(SessionImplementor session) {
return new Timestamp(System.currentTimeMillis());
}
#Override
public Comparator<Date> getComparator() {
return getJavaTypeDescriptor().getComparator();
}
#Override
public String objectToSQLString(Date value, Dialect dialect) throws Exception {
final Timestamp ts = Timestamp.class.isInstance(value)
? (Timestamp) value
: new Timestamp(value.getTime());
// TODO : use JDBC date literal escape syntax? -> {d 'date-string'} in yyyy-mm-dd hh:mm:ss[.f...] format
return StringType.INSTANCE.objectToSQLString(ts.toString(), dialect);
}
#Override
public Date fromStringValue(String xml) throws HibernateException {
return fromString(xml);
}
}
This solution mostly relies on TimestampType implementation with adding additional behaviour through anonymous class of type JdbcTimestampTypeDescriptor.
There are some classes in the Java platform libraries that do extend an instantiable
class and add a value component. For example, java.sql.Timestamp
extends java.util.Date and adds a nanoseconds field. The equals implementation
for Timestamp does violate symmetry and can cause erratic behavior if
Timestamp and Date objects are used in the same collection or are otherwise intermixed.
The Timestamp class has a disclaimer cautioning programmers against
mixing dates and timestamps. While you won’t get into trouble as long as you
keep them separate, there’s nothing to prevent you from mixing them, and the
resulting errors can be hard to debug. This behavior of the Timestamp class was a
mistake and should not be emulated.
check out this link
http://blogs.sourceallies.com/2012/02/hibernate-date-vs-timestamp/
Just add the annotation @Temporal(TemporalType.DATE) to a java.util.Date field in your entity class.
More information available in this stackoverflow answer.
I ran into a problem with this as well as my JUnit assertEquals were failing comparing Dates to Hibernate emitted 'java.util.Date' types (which as described in the question are really Timestamps). It turns out that by changing the mapping to 'date' rather than 'java.util.Date' Hibernate generates java.util.Date members. I am using an XML mapping file with Hibernate version 4.1.12.
This version emits 'java.util.Timestamp':
<property name="date" column="DAY" type="java.util.Date" unique-key="KONSTRAINT_DATE_IDX" unique="false" not-null="true" />
This version emits 'java.util.Date':
<property name="date" column="DAY" type="date" unique-key="KONSTRAINT_DATE_IDX" unique="false" not-null="true" />
Note, however, if Hibernate is used to generate the DDL, then these will generate different SQL types (Date for 'date' and Timestamp for 'java.util.Date').
Use #Type annotation on Hibernate entity Filed to customize your mapping with DB object and java object
Visit: https://www.baeldung.com/hibernate-custom-types
I work on an application that has been converted from pure JDBC to Spring template with row mapper. The issue that I have is that the column in database doesn't match the property names which prevent me from using BeanPropertyRowMapper easily.
I saw some posts about using aliases in queries. This would work but it makes it impossible to do a SELECT *
Isn't there any annotation that can be used with BeanPropertyRowMapper as #Column from JPA?
I saw Some posts about using aliases in queries
This is actually an approach suggested in JavaDocs:
To facilitate mapping between columns and fields that don't have matching names, try using column aliases in the SQL statement like "select fname as first_name from customer".
From: BeanPropertyRowMapper.
impossible to do a SELECT *
Please do not use SELECT *. This makes you vulnerable to any database schema change, including completely backward compatible ones like adding or rearranging columns.
Isn't there any annotation that can be used with BeanPropertyRowMapper as #Column from JPA?
Yes, it is called jpa, hibernate and maybe ibatis. Seriously, either use aliases or implement your own RowMapper, Spring is not a full-featured orm.
You can override the BeanPropertyRowMapper.underscoreName, and get the name of the Column annotation to mapping the field with #Column(name = "EXAMPLE_KEY") in the PropertyDescriptor(getter/setter binding).
#Slf4j
public class ColumnRowMapper<T> extends BeanPropertyRowMapper<T> {
private ColumnRowMapper(final Class<T> mappedClass)
{
super(mappedClass);
}
#Override
protected String underscoreName(final String name)
{
final Column annotation;
final String columnName;
Field declaredField = null;
try
{
declaredField = getMappedClass().getDeclaredField(name);
}
catch (NoSuchFieldException | SecurityException e)
{
log.warn("Ups, field «{}» not found in «{}».", name, getMappedClass());
}
if (declaredField == null || (annotation = declaredField.getAnnotation(Column.class)) == null
|| StringUtils.isEmpty(columnName = annotation.name()))
{
return super.underscoreName(name);
}
return StringUtils.lowerCase(columnName);
}
/**
* New instance.
*
* #param <T> the generic type
* #param mappedClass the mapped class
* #return the bean property row mapper
*/
public static <T> BeanPropertyRowMapper<T> newInstance(final Class<T> mappedClass)
{
return new ColumnRowMapper<>(mappedClass);
}
}
A version of above mapper but with early initiation of mapping index, since reflection is way too slow:
import java.lang.reflect.Field;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.persistence.Column;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
#Slf4j
public class ColumnRowMapper<T> extends BeanPropertyRowMapper<T> {
private Map<String, String> columnIndex;
private ColumnRowMapper(final Class<T> mappedClass)
{
super(mappedClass);
}
#Override
protected void initialize(Class<T> mappedClass) {
columnIndex = new ConcurrentHashMap<>();
for (Field f: mappedClass.getDeclaredFields()) {
String fieldName = f.getName();
Column annotation = f.getAnnotation(Column.class);
if (annotation == null) {
continue;
}
String columnName = annotation.name();
if (StringUtils.isEmpty(columnName)) {
continue;
}
columnIndex.put(fieldName, StringUtils.lowerCase(columnName));
}
super.initialize(mappedClass);
}
#Override
protected #NonNull String underscoreName(final #NonNull String name)
{
if (columnIndex.containsKey(name)) {
return columnIndex.get(name);
}
return super.underscoreName(name);
}
/**
* New instance.
*
* #param <T> the generic type
* #param mappedClass the mapped class
* #return the bean property row mapper
*/
public static <T> BeanPropertyRowMapper<T> newInstance(final Class<T> mappedClass)
{
return new ColumnRowMapper<>(mappedClass);
}
}